├── .gitignore ├── .travis.yml ├── Dockerfile ├── LICENSE.md ├── Makefile ├── README.md ├── auth_test.sh ├── config ├── engine.config └── sasl.config ├── doc └── overview.edoc ├── erlsonPatch.txt ├── es_config ├── elasticsearch.yml ├── elasticsearch_cluster_example.yml ├── logging.yml ├── mappings │ └── sensorcloud │ │ ├── datapoint.json │ │ ├── group.json │ │ ├── pollinghistory.json │ │ ├── requests.json │ │ ├── resource.json │ │ ├── search_query.json │ │ ├── stream.json │ │ ├── suggestion.json │ │ ├── token.json │ │ ├── trigger.json │ │ ├── user.json │ │ ├── virtual_stream.json │ │ └── vsdatapoint.json └── templates │ └── sensorcloud.json ├── include ├── common.hrl ├── debug.hrl ├── field_restrictions.hrl ├── json.hrl ├── poller.hrl ├── pubsub.hrl ├── rabbit_common │ └── include │ │ ├── gm_specs.hrl │ │ ├── rabbit.hrl │ │ ├── rabbit_framing.hrl │ │ └── rabbit_msg_store.hrl └── state.hrl ├── javascripts ├── package.json └── receive.js ├── priv ├── dispatch.conf └── www │ └── stream.html ├── rebar ├── rebar.config ├── scripts ├── boot │ ├── iotf-backend │ ├── iotf-es │ └── iotf-rmq ├── install.sh ├── python │ ├── README.md │ ├── cgi-bin │ │ ├── humidity.py │ │ └── temperature.py │ ├── financialstreams │ ├── gamestreams │ ├── post_avg.py │ ├── poststreams.py │ └── weatherstreams ├── sensec.sh ├── sensec_light.sh └── travis-elasticsearch.sh ├── semantic-adapter ├── README.md ├── app │ ├── __init__.py │ ├── mimetype.py │ ├── semantic_adapter.py │ └── semantic_adapter_citypulse.py ├── gunicorn_run.sh ├── lib │ ├── __init__.py │ ├── broker.py │ ├── constants.py │ ├── semantics.py │ └── util.py ├── ontologies │ ├── SAO-v04.ttl │ └── ucum-instances.owl ├── pip-freeze.txt ├── templates │ └── index.html ├── test_pubsub │ ├── __init__.py │ ├── kb_uploader.py │ └── test_publisher.py └── web_run.py ├── src ├── analyse.erl ├── api_help.erl ├── config.erl ├── datapoints.erl ├── destructure_json.erl ├── engine.app.src ├── engine.erl ├── engine_app.erl ├── engine_sup.erl ├── groups.erl ├── lib_file.erl ├── lib_json.erl ├── openidc.erl ├── parser.erl ├── plus_srv.erl ├── poll_help.erl ├── poller.erl ├── polling_monitor.erl ├── polling_system.erl ├── pubsub │ ├── gen_virtual_stream_process.erl │ ├── resourceProcessMock.erl │ ├── streamProcess.erl │ ├── triggersProcess.erl │ └── virtual_stream_process_supervisor.erl ├── resource.erl ├── resources.erl ├── scoring.erl ├── search.erl ├── singleton.erl ├── static_resource.erl ├── stream_publisher.erl ├── stream_reciever.erl ├── streams.erl ├── suggest.erl ├── triggers.erl ├── triggers_lib.erl ├── users.erl ├── virtual_streams.erl └── vs_func_lib.erl └── test ├── config.spec ├── datapoints_tests.erl ├── gen_virtual_stream_process_tests.erl ├── http.erl ├── lib_json_tests.erl ├── poll_help_tests.erl ├── polling_system_tests.erl ├── resources_tests.erl ├── search_tests.erl ├── streams_tests.erl ├── suggest_tests.erl ├── test.erl ├── triggers_lib_tests.erl ├── triggers_tests.off ├── users_tests.erl ├── vs_func_lib_tests.erl └── vstreams_tests.erl /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore these 2 | ebin/ 3 | doc/* 4 | lib/ 5 | erl_crash.dump 6 | *~ 7 | TEST*.xml 8 | .project 9 | .settings 10 | *.swp 11 | priv/data 12 | priv/logs 13 | javascripts/node_modules 14 | *.pyc 15 | 16 | # Do not Ignore these 17 | !doc/overview.edoc 18 | /ebin 19 | .temp.log 20 | -------------------------------------------------------------------------------- /.travis.yml: 
-------------------------------------------------------------------------------- 1 | language: erlang 2 | env: R_HOME=/usr/lib/R 3 | before_install: 4 | - sudo apt-get install -qq xsltproc 5 | - sudo add-apt-repository "deb http://ftp.sunet.se/pub/lang/CRAN/bin/linux/ubuntu precise/" 6 | - sudo apt-get install r-base 7 | install: 8 | - make install 9 | services: 10 | - rabbitmq 11 | script: "make test_travis" 12 | before_script: 13 | - "curl https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/master/scripts/travis-elasticsearch.sh| bash" 14 | - "cd scripts/python/" 15 | - "python -m CGIHTTPServer 8001 &" 16 | - "python -m CGIHTTPServer 8002 &" 17 | - "cd ../../" 18 | otp_release: 19 | # - R16B02 20 | - R16B01 21 | # - R16B 22 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:14.04.2 2 | MAINTAINER Konstantinos Vandikas (konstantinos.vandikas@ericsson.com) 3 | 4 | # install git and checkout source-code from github 5 | RUN apt-get install -yq git software-properties-common curl wget 6 | WORKDIR /opt 7 | RUN git clone https://github.com/EricssonResearch/iot-framework-engine.git 8 | 9 | # configure repositories 10 | RUN add-apt-repository ppa:chris-lea/node.js 11 | RUN add-apt-repository "deb http://ftp.sunet.se/pub/lang/CRAN/bin/linux/ubuntu trusty/" 12 | 13 | # update/upgrade base system 14 | RUN apt-get update 15 | RUN apt-get -yq upgrade 16 | 17 | # install misc dependencies 18 | RUN apt-get install -yq xsltproc python-pip libpython-dev 19 | 20 | # install erlang 21 | RUN apt-get install -yq erlang 22 | 23 | # install nodejs 24 | RUN apt-get install -yq python-software-properties python g++ make 25 | RUN apt-get install -yq nodejs 26 | 27 | # install R 28 | RUN apt-get install -yq r-base --force-yes 29 | 30 | # install semantic-adapter 31 | WORKDIR /opt/iot-framework-engine 32 | RUN pip install -r semantic-adapter/pip-freeze.txt 33 | 34 | # compilation 35 | WORKDIR /opt/iot-framework-engine 36 | RUN make install 37 | 38 | # expose port 39 | EXPOSE 8000 40 | 41 | # Start the IoT-Framework 42 | CMD cd /opt/iot-framework-engine && ./scripts/sensec_light.sh start 43 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | ### Makefile for IoTFramework-engine ### 3 | ################################################################################ 4 | ################################################################################ 5 | ################################################################################ 6 | ### Variable assignment 7 | ################################################################################ 8 | ERL := erl 9 | REBAR := ./rebar 10 | ERL_CONFIG := -config config/engine.config -config config/sasl.config 11 | ERL_BOOT := -boot start_sasl -s reloader -s engine 12 | ERL_PA_FOLDERS := -pa ebin/ lib/*/ebin/ lib/*/bin/ 13 | TEST_RESULT_FOLDER := test-results 14 | ################################################################################ 15 | ################################################################################ 16 | ################################################################################ 17 | ### Dependency rules 18 | ### Do NOT touch this section! 
19 | ### The commands in this section should not be used in general, but can be used 20 | ### if there is need for it 21 | ################################################################################ 22 | compile: 23 | @$(REBAR) compile skip_deps=true 24 | 25 | ### get_libs will download and install all project libraries 26 | conf: compile_libs 27 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) -s engine -s config 28 | make compile_libs 29 | 30 | get_libs: 31 | @@$(REBAR) get-deps 32 | wget "https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.1.2.zip" 33 | unzip elasticsearch-1.1.2.zip -d lib 34 | rm elasticsearch-1.1.2.zip 35 | cp -r es_config/* lib/elasticsearch-1.1.2/config/ 36 | -git apply --directory=lib/erlson --whitespace=fix erlsonPatch.txt 37 | 38 | compile_libs: 39 | @$(REBAR) compile 40 | $(MAKE) -C lib/rabbitmq-server 41 | $(MAKE) -C lib/rabbitmq-erlang-client 42 | $(MAKE) -C lib/rErlang 43 | 44 | clean_emacs_vsn_files: 45 | rm -rf *~ 46 | rm -rf doc/*~ 47 | rm -rf include/*~ 48 | rm -rf priv/*~ 49 | rm -rf scripts/*~ 50 | rm -rf src/*~ 51 | rm -rf test/*~ 52 | ################################################################################ 53 | ################################################################################ 54 | ################################################################################ 55 | ### Command rules 56 | ### This section contains commands that can be used. 57 | ### This section can be edited if needed 58 | ################################################################################ 59 | 60 | ### Command: make 61 | ### Builds the entire project, excluding the dependencies. 62 | all: compile 63 | 64 | ### Command: make install 65 | ### Downloads all dependencies and builds the entire project 66 | install: get_libs conf 67 | (cd javascripts; npm install socket.io; npm install rabbit.js) 68 | 69 | install_linux_deps: 70 | sudo scripts/install.sh 71 | 72 | ### Command: make run 73 | ### Compiles the project and runs it. Does NOT compile libraries. 74 | run: compile 75 | curl -XPUT localhost:9200/sensorcloud 76 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine 77 | 78 | ### Command: make run_all 79 | ### Starts all parts of the system in one command. 80 | run_all: compile 81 | sudo scripts/sensec.sh start 82 | 83 | ### Command: make test_setup 84 | ### Runs all parts of the system except the erlang part. To be used prior to 'make test' for easy test setup 85 | test_setup: compile 86 | sudo scripts/sensec.sh test_setup 87 | 88 | ### Command: make stop_all 89 | ### Stops all parts of the system in one command.
90 | stop_all: 91 | sudo scripts/sensec.sh stop 92 | 93 | ### Command: make run_semantic_adapter 94 | run_semantic_adapter: 95 | sudo python semantic-adapter/web_run.py 96 | 97 | ### Command: make run_es 98 | ### Runs elastic search 99 | run_es: 100 | lib/elasticsearch-1.1.2/bin/elasticsearch 101 | 102 | ### Command: make run_nodejs 103 | ### Runs NodeJS 104 | run_nodejs: 105 | nodejs javascripts/receive.js 106 | 107 | ### Command: make run_fake_resource 108 | ### Runs the fake resources for polling 109 | run_fake_resource: 110 | (cd scripts/python/ && python -m CGIHTTPServer 8001 &) 111 | (cd scripts/python/ && python -m CGIHTTPServer 8002) 112 | 113 | ### Command: make run_rabbit 114 | ### Runs rabbitMQ server 115 | run_rabbit: 116 | sudo lib/rabbitmq-server/scripts/rabbitmq-server 117 | 118 | ### Command: make test 119 | ### Compiles project resources (not libraries) and runs all eunit tests. 120 | test: compile 121 | -@mkdir $(TEST_RESULT_FOLDER) 122 | curl -XDELETE localhost:9200/sensorcloud 123 | curl -XPUT localhost:9200/sensorcloud 124 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -s test run 125 | 126 | test_travis: compile 127 | -@mkdir $(TEST_RESULT_FOLDER) 128 | curl -XPUT localhost:9200/sensorcloud 129 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -s test run 130 | 131 | test_datapoints: compile 132 | -@mkdir $(TEST_RESULT_FOLDER) 133 | curl -XDELETE localhost:9200/sensorcloud 134 | curl -XPUT localhost:9200/sensorcloud 135 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(datapoints)' 136 | 137 | test_json: compile 138 | -@mkdir $(TEST_RESULT_FOLDER) 139 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(lib_json)' 140 | 141 | test_resources: compile 142 | -@mkdir $(TEST_RESULT_FOLDER) 143 | curl -XDELETE localhost:9200/sensorcloud 144 | curl -XPUT localhost:9200/sensorcloud 145 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(resources)' 146 | 147 | test_streams: compile 148 | -@mkdir $(TEST_RESULT_FOLDER) 149 | curl -XDELETE localhost:9200/sensorcloud 150 | curl -XPUT localhost:9200/sensorcloud 151 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(streams)' 152 | 153 | test_users: compile 154 | -@mkdir $(TEST_RESULT_FOLDER) 155 | curl -XDELETE localhost:9200/sensorcloud 156 | curl -XPUT localhost:9200/sensorcloud 157 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(users)' 158 | 159 | test_poll: compile 160 | -@mkdir $(TEST_RESULT_FOLDER) 161 | curl -XDELETE localhost:9200/sensorcloud 162 | curl -XPUT localhost:9200/sensorcloud 163 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(poll_help)' 164 | 165 | test_poll_system: compile 166 | -@mkdir $(TEST_RESULT_FOLDER) 167 | curl -XDELETE localhost:9200/sensorcloud 168 | curl -XPUT localhost:9200/sensorcloud 169 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(polling_system)' 170 | 171 | test_search: compile 172 | -@mkdir $(TEST_RESULT_FOLDER) 173 | curl -XDELETE localhost:9200/sensorcloud 174 | curl -XPUT localhost:9200/sensorcloud 175 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(search)' 176 | 177 | test_triggers: compile 178 | -@mkdir $(TEST_RESULT_FOLDER) 179 | curl -XDELETE localhost:9200/sensorcloud 180 | curl -XPUT localhost:9200/sensorcloud 181 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine
-eval 'test:run(triggers)' 182 | 183 | 184 | test_vstreams: compile 185 | -@mkdir $(TEST_RESULT_FOLDER) 186 | curl -XDELETE localhost:9200/sensorcloud 187 | curl -XPUT localhost:9200/sensorcloud 188 | $(ERL) $(ERL_PA_FOLDERS) $(ERL_CONFIG) $(ERL_BOOT) -sname engine -eval 'test:run(gen_virtual_stream_process)' 189 | 190 | 191 | ### Command: make docs 192 | ### Generates all of the documentation files 193 | docs: all 194 | ./rebar doc skip_deps=true 195 | 196 | ### Command: make clean 197 | ### Cleans the directory of the following things: 198 | ### * Emacs versioning files. 199 | ### * All erlang .beam files, including 'ebin' folder 200 | clean: clean_emacs_vsn_files 201 | @./rebar clean skip_deps=true 202 | rm -f erl_crash.dump 203 | rm -rf ebin/ 204 | rm -rf test-results/ 205 | 206 | ### Command: make clean_libs 207 | ### Cleans the directory of the following things: 208 | ### * All the downloaded libraries 209 | clean_libs: 210 | @./rebar delete-deps 211 | rm -rf lib/ 212 | 213 | ### Command: make clean_docs 214 | ### Cleans the directory of the following things: 215 | ### * All the documentation files except 'overview.edoc' 216 | clean_docs: 217 | find doc/ -type f -not -name 'overview.edoc' | xargs rm 218 | 219 | ### Command: make help 220 | ### Prints an explanation of the commands in this Makefile 221 | help: 222 | @echo "###################################################################" 223 | @echo "Commands:" 224 | @echo "" 225 | @echo "'make'" 226 | @echo "Compiles all the project sources. Does NOT compile libraries" 227 | @echo "" 228 | @echo "'make install'" 229 | @echo "Downloads and compiles all libraries" 230 | @echo "" 231 | @echo "'make install_linux_deps'" 232 | @echo "Installs all linux dependencies needed. Should only be necessary to do once on a system." 233 | @echo "" 234 | @echo "'make run'" 235 | @echo "Compiles and runs the otp app. Does NOT compile libraries" 236 | @echo "" 237 | @echo "'make run_all'" 238 | @echo "Compiles the system (not libraries) and runs ALL parts of the system." 239 | @echo "" 240 | @echo "'make stop_all'" 241 | @echo "Stops all parts of the system started with 'make run_all'" 242 | @echo "" 243 | @echo "'make run_all'" 244 | @echo "Compiles and runs all parts of the project.
Does NOT compile libraries" 245 | @echo "" 246 | @echo "'make test_setup'" 247 | @echo "Easy setup of environment prior to running 'make test'" 248 | @echo "" 249 | @echo "'make stop_all'" 250 | @echo "Stops all parts of the project which were started with 'make run_all'" 251 | @echo "" 252 | @echo "'make run_es'" 253 | @echo "Runs the elastic search server" 254 | @echo "" 255 | @echo "'make run_nodejs'" 256 | @echo "Runs nodejs" 257 | @echo "" 258 | @echo "'make run_rabbit'" 259 | @echo "Runs the rabbitMQ server" 260 | @echo "" 261 | @echo "'make docs'" 262 | @echo "Generates documentation for the project" 263 | @echo "" 264 | @echo "'make clean'" 265 | @echo "Cleans all of the project, excluding dependencies" 266 | @echo "" 267 | @echo "'make clean_libs'" 268 | @echo "Cleans all of the libraries" 269 | @echo "" 270 | @echo "'make clean_docs'" 271 | @echo "Cleans all of the documentation files, except for 'overview.edoc'" 272 | @echo "" 273 | @echo "'make help'" 274 | @echo "Prints an explanation of the commands in this Makefile" 275 | @echo "###################################################################" 276 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IoT-Framework Engine 2 | 3 | The IoT-Framework is a computational engine for the Internet of Things (IoT). It was developed jointly by Ericsson Research, the Swedish Institute of Computer Science (SICS) and Uppsala University in the scope of Project CS 2013. This repository contains the server side of the system, while [IoT-Framework-GUI](https://github.com/EricssonResearch/iot-framework-gui) contains the graphical user interface. 4 | 5 | There is an [Ericsson Research blog](http://www.ericsson.com/research-blog/) post that gives a brief intro to what the IoT-Framework is and what can be done using it: [A computational engine for the Internet of Things](https://www.ericsson.com/research-blog/internet-of-things/computational-engine-internet-things/) 6 | 7 | Moreover, there is a publication which describes the inner workings of the engine along with how to measure its performance. This publication is titled "Performance Evaluation of an IoT Platform" and can be found [here](http://ieeexplore.ieee.org/xpl/articleDetails.jsp?tp=&arnumber=6982906&queryText%3Dvandikas). 8 | 9 | ## Demo 10 | 11 | You can check out a demo of the IoT-Framework here: [IoT-Framework demo](https://vimeo.com/98966770). Please mind that the demo assumes that you are using both the IoT-Framework Engine and the IoT-Framework-GUI. 12 | 13 | ## Installing the project 14 | 15 | 1. Download and compile the linux system dependencies (only needed once per machine) 16 | 17 | make install_linux_deps 18 | 19 | 2. Download and compile the project dependencies, and compile the project sources 20 | 21 | make install 22 | 23 | ## Running the project 24 | 25 | 1. Run the application by using the startup script (one of the commands below) 26 | 27 | make run_all 28 | 29 | or 30 | 31 | sudo ./scripts/sensec.sh start 32 | 33 | 2. Alternative run (type each in separate shells) 34 | 35 | make run_rabbit 36 | make run_es 37 | make run_nodejs 38 | % don't forget to export R_HOME for example 39 | export R_HOME="/usr/lib/R" 40 | make run 41 | 42 | 3. 
To shut down, either close each individual shell or run one of the commands below 43 | 44 | make stop_all 45 | 46 | or 47 | 48 | sudo ./scripts/sensec.sh stop 49 | 50 | ## Running the project via Docker 51 | 52 | 1. Retrieve the Dockerfile 53 | 2. Build the docker image 54 | 55 | docker build -t username/image_name:version home_directory 56 | (or more concretely) docker build -t costa/iot-f:v1 . 57 | 58 | 3. Run the image 59 | 60 | docker run -p 8000 -dti username/image_name:version 61 | (or more concretely) docker run -p 8000 -dti costa/iot-f:v1 62 | 63 | 4. Check if the system is running. This command should show if the system is running properly and also the port mapping that has been made for port 8000. 64 | 65 | docker ps 66 | 67 | 68 | ## Running tests 69 | 70 | 1. There are two ways of setting up the environment for testing. Either run the startup script with one of the commands below. 71 | 72 | make test_setup 73 | sudo ./scripts/sensec.sh test_setup 74 | 75 | 2. Or run each of the following commands in a separate shell 76 | 77 | make run_rabbit 78 | make run_es 79 | make run_nodejs 80 | make run_fake_resource 81 | 82 | 3. Run the tests 83 | 84 | make test 85 | 86 | ## Code Status 87 | 88 | [![Build Status](https://travis-ci.org/EricssonResearch/iot-framework-engine.svg)](https://travis-ci.org/EricssonResearch/iot-framework-engine) 89 | -------------------------------------------------------------------------------- /auth_test.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | ### Test Variables 4 | MY_ID="104295496712959038073" 5 | OTHER_PUBLIC_ID="108895705188930031602" 6 | OTHER_PRIVATE_ID="107908217220817548513" 7 | 8 | MY_STREAM_ID="ZLDDaaZATcK451vMMdHqWw" 9 | RANKED_STREAM="mh5KhW9sTbGbmVRCSItOhg" 10 | MY_ACCESS_TOKEN="ya29.WwDJorUu9ByRvxwAAAADBNA6_9H79kwom8mxZZOdHTvmWkLwwQyj-hV5iR0DPw" 11 | 12 | 13 | 14 | printf "##########################################################\n" 15 | printf "## OpenID Connect Authentication/Authorization Test ###\n" 16 | printf "##########################################################\n\n" 17 | 18 | 19 | printf "#### Rule 1 #### - " 20 | printf "Can DO anything with our own data except manipulating datapoints\n" 21 | 22 | printf "\n\nShould be OK -> Update my own user info\n" 23 | curl -XPUT "http://localhost:8000/users/$MY_ID" -H "Access-Token: $MY_ACCESS_TOKEN" -H "Content-Type: application/json" -d '{"description": "testing PUT with my own user"}' 24 | 25 | printf "\n\nShould be OK -> Get my updated user info\n" 26 | curl -XGET "http://localhost:8000/users/$MY_ID" -H "Access-Token: $MY_ACCESS_TOKEN" 27 | 28 | printf "\n\nShould FAIL -> CAN NOT create datapoints\n" 29 | curl -XPOST "http://localhost:8000/streams/$MY_STREAM_ID/data" -H "Access-Token: ya29.WgBV0eqjpIxktSEAAABq5sHdJVSN64Ij3ztnhD49PW6-oEyLJ7M7F3CGAxkKeEvx1Y8Quj-SuVSavRhNmFc" -H "Content-Type: application/json" -d '{"data":[{"timestamp":"2013-12-04T17:43:02.000","value":117.96},{"timestamp":"2013-12-04T17:47:02.000","value":117.96}]}' 30 | 31 | printf "\n\n---------------------------------------------------------------------------\n\n" 32 | 33 | 34 | 35 | printf "#### Rule 2 #### - " 36 | printf "Can NOT DO anything to private users\n" 37 | 38 | printf "\n\nShould FAIL -> Get user info of the IoT Framework's registered user\n" 39 | curl -XGET "http://localhost:8000/users/$OTHER_PRIVATE_ID" -H "Access-Token: $MY_ACCESS_TOKEN" 40 | printf "\n\nShould FAIL -> Update user info of the IoT Framework's registered user\n" 41 | curl
-XPUT "http://localhost:8000/users/$OTHER_PRIVATE_ID" -H "Access-Token: $MY_ACCESS_TOKEN" -H "Content-Type: application/json" -d '{"description": testing PUT with my own user!}' 42 | 43 | printf "\n\n---------------------------------------------------------------------------\n\n" 44 | 45 | 46 | 47 | printf "#### Rule 3 #### - " 48 | printf "Can ONLY GET User/S/VS from other public users\n" 49 | 50 | printf "\n\nShould be OK -> Fetch public user's data profile\n" 51 | curl -XGET "http://localhost:8000/users/$OTHER_PUBLIC_ID" -H "Access-Token: $MY_ACCESS_TOKEN" 52 | 53 | printf "\n\nShould be OK -> Fetch public user's streams\n" 54 | curl -XGET "http://localhost:8000/users/$OTHER_PUBLIC_ID/streams" -H "Access-Token: $MY_ACCESS_TOKEN" 55 | 56 | printf "\n\nShould be OK -> Fetch public user's vstreams\n" 57 | curl -XGET "http://localhost:8000/users/$OTHER_PUBLIC_ID/vstreams" -H "Access-Token: $MY_ACCESS_TOKEN" 58 | 59 | printf "\n\nShould FAIL -> CAN NOT Fetch public user's triggers\n" 60 | curl -XGET "http://localhost:8000/users/$OTHER_PUBLIC_ID/triggers" -H "Access-Token: $MY_ACCESS_TOKEN" 61 | 62 | printf "\n\nShould FAIL -> CAN NOT Update a public user's stream\n" 63 | curl -XPUT "http://localhost:8000/streams/$MY_STREAM_ID" -H "Access-Token: $MY_ACCESS_TOKEN" -H "Content-Type: application/json" -d '{"name": "Now it is my stream"}' 64 | 65 | printf "\n\n---------------------------------------------------------------------------\n\n" 66 | 67 | 68 | 69 | printf "#### Rule 4 #### - " 70 | printf "Can PUT the ranking of other user's stream\n" 71 | 72 | printf "\n\nShould be OK -> Update the ranking of a stream, either if it is ours or not\n" 73 | curl -XPUT "http://localhost:8000/streams/$RANKED_STREAM/_rank" -H "Access-Token: $MY_ACCESS_TOKEN" -H "Content-Type: application/json" -d '{"stream_id":"$MY_STREAM_ID","value":1.0}' 74 | 75 | printf "\n\n---------------------------------------------------------------------------\n\n" 76 | 77 | 78 | 79 | printf "#### Rule 5 #### - " 80 | printf "Anything else is forbidden\n" 81 | 82 | printf "\n\nShould FAIL -> Remove other user's stream\n" 83 | curl -XDELETE "http://localhost:8000/streams/$MY_STREAM_ID/" -H "Access-Token: $MY_ACCESS_TOKEN" 84 | 85 | printf "\n\n---------------------------------------------------------------------------\n\n" 86 | 87 | 88 | 89 | printf "#### Rule 6 #### - " 90 | printf "Can GET or POST to Users / Streams / VStreams collections\n" 91 | 92 | printf "\n\nShould be OK -> Create a new stream\n" 93 | curl -XPOST "http://localhost:8000/streams" -H "Access-Token: $MY_ACCESS_TOKEN" -H "Content-Type: application/json" -d '{"name": "Stream Test 123", "description": "my new stream", "user_id": "$MY_ID"}' 94 | 95 | printf "\n\n---------------------------------------------------------------------------\n\n" 96 | 97 | printf "All tests done :)\n" 98 | -------------------------------------------------------------------------------- /config/engine.config: -------------------------------------------------------------------------------- 1 | %%-*- mode: erlang -*- 2 | [{engine, [ 3 | %% Specifies the log directory for the main application. 
4 | {engine_log_dir, "priv/logs"}, %Relative from the engine application root or absolute path 5 | 6 | %% Specifies the cluster name for elastic search 7 | {es_cluster_name, "engine"}, 8 | 9 | %% Specifies the directory for the elastic search data 10 | {es_db_dir, "priv/data"}, %Relative from engine application root or absolute path 11 | 12 | %% Specifies the directory for the elastic search additional data directories 13 | %% The value for this needs to be specified as a string with each directory passed separated with a comma 14 | %% Example: "Path/To/Data1,Path/To/Data2" 15 | {es_db_extra_dir, ""}, %List with paths relative from engine application root or absolute paths 16 | 17 | %% Specifies the ip address for elastic search 18 | {es_ip, "localhost"}, 19 | 20 | %% Specifies the log directory for elastic search 21 | {es_log_dir, "priv/logs"}, %Relative from the engine application root or absolute path 22 | 23 | %% Specifies the port number for elastic search 24 | {es_port, 9200}, 25 | 26 | %% Specifies the log directory for nodejs 27 | {nodejs_log_dir, "priv/logs"}, %Relative from the engine application root or absolute path 28 | 29 | %% Specifies the log directory for rabbit_mq 30 | {rabbit_mq_log_dir, "priv/logs"}, %Relative from the engine application root or absolute path 31 | 32 | %% Specifies the ip address for rabbit_mq 33 | {rabbit_mq_ip, "localhost"}, 34 | 35 | %% Specifies the port number for rabbit_mq 36 | {rabbit_mq_port, 5672}, 37 | 38 | %% Specifies the log directory for webmachine 39 | {webmachine_log_dir, "priv/logs"}, %Relative from the engine application root or absolute path 40 | 41 | %% Specifies the port number for webmachine 42 | {webmachine_port, 8000} 43 | ]} 44 | ]. 45 | -------------------------------------------------------------------------------- /config/sasl.config: -------------------------------------------------------------------------------- 1 | %%-*- mode: erlang -*- 2 | [{sasl, [ 3 | {errlog_type, error} % Values: error | progress | all 4 | ]} 5 | ]. 6 | -------------------------------------------------------------------------------- /doc/overview.edoc: -------------------------------------------------------------------------------- 1 | ** this is the overview.edoc file for the application 'Sicsth Sense' ** 2 | 3 | @author Project CS Group 4 | @copyright Sicsth Sense 5 | @version 1.0.0 6 | @title Welcome to Sicsth Sense 7 | @doc This is the engine module for the Sicsth Sense. -------------------------------------------------------------------------------- /erlsonPatch.txt: -------------------------------------------------------------------------------- 1 | diff --git a/include/erlson.hrl b/include/erlson.hrl 2 | index f7ce189..565788b 100644 3 | --- a/include/erlson.hrl 4 | +++ b/include/erlson.hrl 5 | @@ -7,8 +7,8 @@ 6 | -define(erlson_default(X, Default), 7 | try 8 | X 9 | - catch error:'erlson_not_found' -> 10 | - Default 11 | + catch error:_ -> 12 | + Default 13 | end 14 | ). 15 | 16 | diff --git a/src/erlson.erl b/src/erlson.erl 17 | index 1205634..f86e716 100644 18 | --- a/src/erlson.erl 19 | +++ b/src/erlson.erl 20 | @@ -28,14 +28,14 @@ 21 | 22 | % public API 23 | -export([from_proplist/1, from_nested_proplist/1, from_nested_proplist/2]). 24 | --export([to_json/1, from_json/1]). 25 | +-export([to_json/1, from_json/1, is_json_string/1]). 26 | -export([list_to_json_array/1, list_from_json_array/1]).
27 | % these two functions are useful, if there's a need to call mochijson2:decode 28 | % and mochijson2:encode separately 29 | -export([to_json_term/1, from_json_term/1]). 30 | 31 | % these functions are used by Erlson compiled code 32 | --export([fetch/2, store/3]). 33 | +-export([fetch/2, store/3, remove/2]). 34 | 35 | -export([get_value/2]). 36 | -export([get_value/3]). 37 | @@ -105,6 +105,9 @@ get_value(Path, Dict, Default) -> 38 | end. 39 | 40 | 41 | +fetch_path([H|T], Dict) when is_integer(H) -> 42 | + Val = lists:nth(H, Dict), 43 | + fetch_path(T, Val); 44 | fetch_path([H|T], Dict) -> 45 | Val = fetch_val(H, Dict), 46 | fetch_path(T, Val); 47 | @@ -138,8 +141,17 @@ store(Path, Value, Dict) -> 48 | end. 49 | 50 | 51 | +store_path([N], Value, Dict) when is_integer(N) -> 52 | + {Part1, [_OldValue | Part2]} = lists:split(N-1, Dict), 53 | + Part1 ++ [Value | Part2]; 54 | store_path([N], Value, Dict) -> 55 | store_val(N, Value, Dict); 56 | +store_path([H|T], Value, Dict) when is_integer(H)-> 57 | + %% Split on H-1 in order to get the desired element in the head of the second list for quick access. 58 | + {Part1, [InnerDict | Part2]} = lists:split(H-1, Dict), 59 | + % replace the existing value with the new inner dictionary 60 | + NewInnerDict = store_path(T, Value, InnerDict), 61 | + Part1 ++ [NewInnerDict | Part2]; 62 | store_path([H|T], Value, Dict) -> 63 | InnerDict = fetch_val(H, Dict), 64 | % replace the existing value with the new inner dictionary 65 | @@ -151,6 +163,46 @@ store_val(Name, Value, Dict) -> 66 | orddict:store(Name, Value, Dict). 67 | 68 | 69 | +-spec remove/2 :: ( 70 | + Path :: name_path(), 71 | + Dict :: orddict() ) -> orddict(). 72 | +remove(Name, Dict) when is_atom(Name) -> 73 | + remove_val(Name, Dict); 74 | +remove(Path, Dict) -> 75 | + try 76 | + remove_path(Path, Dict) 77 | + catch 78 | + 'erlson_not_found' -> 79 | + erlang:error('erlson_not_found', [Path, Dict]) 80 | + end. 81 | + 82 | +remove_path([N], Dict) when is_integer(N) -> 83 | + {Part1, [_H | Part2]} = lists:split(N-1, Dict), 84 | + Part1 ++ Part2; 85 | +remove_path([N], Dict) -> 86 | + remove_val(N, Dict); 87 | +remove_path([H|T], Dict) when is_integer(H) -> 88 | + %% Split on H-1 in order to get the desired element in the head of the second list for quick access. 89 | + {Part1, [InnerDict | Part2]} = lists:split(H-1, Dict), 90 | + % replace the existing value with the new inner dictionary 91 | + case remove_path(T, InnerDict) of 92 | + [] -> 93 | + Part1 ++ Part2; 94 | + NewInnerDict -> 95 | + Part1 ++ [NewInnerDict | Part2] 96 | + end; 97 | +remove_path([H|T], Dict) -> 98 | + InnerDict = fetch_val(H, Dict), 99 | + % replace the existing value with the new inner dictionary 100 | + NewInnerDict = remove_path(T, InnerDict), 101 | + store_val(H, NewInnerDict, Dict). 102 | + 103 | + 104 | +remove_val(Name, Dict) -> 105 | + orddict:erase(Name, Dict). 106 | + 107 | + 108 | + 109 | % @doc Create Erlson dictionary from a proplist 110 | % 111 | % During conversion, each atom() property is converted to {atom(), true} 112 | @@ -237,6 +289,16 @@ store_proplist_elem(_X, _Dict, _MaxDepth) -> 113 | throw('erlson_bad_proplist'). 114 | 115 | 116 | +is_json_string(Json) -> 117 | + try from_json(Json) of 118 | + _ -> true 119 | + catch 120 | + _:_ -> false 121 | + end. 122 | + 123 | + 124 | + 125 | + 126 | % @doc Convert Erlson dictionary to a JSON Object 127 | -spec to_json/1 :: (Dict :: orddict()) -> iolist(). 
128 | to_json(Dict) -> 129 | @@ -349,7 +411,7 @@ decode_json_term(L) when is_list(L) -> % JSON array 130 | [ decode_json_term(X) || X <- L ]; 131 | decode_json_term('null') -> 132 | % decoding JSON null as a more conventional 'undefined' 133 | - 'undefined'. 134 | + 'null'. 135 | 136 | 137 | from_json_fields(L) -> 138 | @@ -368,7 +430,7 @@ store_json_field({N, V}, Dict) -> 139 | % as either atom() or binary(), and because Erlson dict is ordered, all binary() 140 | % fields will be stored closer to the tail of the list 141 | decode_json_field_name(N) -> 142 | - try binary_to_existing_atom(N, utf8) 143 | + try binary_to_atom(N, utf8) 144 | catch 145 | error:badarg -> N 146 | end. 147 | -------------------------------------------------------------------------------- /es_config/logging.yml: -------------------------------------------------------------------------------- 1 | # you can override this by setting a system property, for example -Des.logger.level=DEBUG 2 | es.logger.level: INFO 3 | rootLogger: ${es.logger.level}, console, file 4 | logger: 5 | # log action execution errors for easier debugging 6 | action: DEBUG 7 | # reduce the logging for aws, too much is logged under the default INFO 8 | com.amazonaws: WARN 9 | 10 | # gateway 11 | #gateway: DEBUG 12 | #index.gateway: DEBUG 13 | 14 | # peer shard recovery 15 | #indices.recovery: DEBUG 16 | 17 | # discovery 18 | #discovery: TRACE 19 | 20 | index.search.slowlog: TRACE, index_search_slow_log_file 21 | index.indexing.slowlog: TRACE, index_indexing_slow_log_file 22 | 23 | additivity: 24 | index.search.slowlog: false 25 | index.indexing.slowlog: false 26 | 27 | appender: 28 | console: 29 | type: console 30 | layout: 31 | type: consolePattern 32 | conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" 33 | 34 | file: 35 | type: dailyRollingFile 36 | file: ${path.logs}/${cluster.name}.log 37 | datePattern: "'.'yyyy-MM-dd" 38 | layout: 39 | type: pattern 40 | conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" 41 | 42 | index_search_slow_log_file: 43 | type: dailyRollingFile 44 | file: ${path.logs}/${cluster.name}_index_search_slowlog.log 45 | datePattern: "'.'yyyy-MM-dd" 46 | layout: 47 | type: pattern 48 | conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" 49 | 50 | index_indexing_slow_log_file: 51 | type: dailyRollingFile 52 | file: ${path.logs}/${cluster.name}_index_indexing_slowlog.log 53 | datePattern: "'.'yyyy-MM-dd" 54 | layout: 55 | type: pattern 56 | conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" 57 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/datapoint.json: -------------------------------------------------------------------------------- 1 | { 2 | "datapoint" : { 3 | "_source" : { "enabled" : true }, 4 | "dynamic": false, 5 | "properties" : { 6 | "stream_id":{ 7 | "type":"string", 8 | "index":"not_analyzed" 9 | }, 10 | "timestamp":{ 11 | "type":"date", 12 | "format":"dateOptionalTime" 13 | }, 14 | "value":{ 15 | "type":"double" 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/group.json: -------------------------------------------------------------------------------- 1 | { 2 | "group" : { 3 | "_source" : { "enabled" : true }, 4 | "_timestamp" : {"enabled" : true, "store" : true}, 5 | "dynamic": false, 6 | "properties" : { 7 | "owner":{ 8 | "type":"string", 9 | "index":"not_analyzed" 10 | }, 11 | "name":{ 12 | "type":"string", 13 | "index":"analyzed" 14 | 
}, 15 | "description":{ 16 | "type":"string" 17 | }, 18 | "tags":{ 19 | "type":"string", 20 | "index":"analyzed" 21 | }, 22 | "input":{ 23 | "type":"string", 24 | "index":"no" 25 | }, 26 | "output":{ 27 | "type":"string", 28 | "index":"no" 29 | }, 30 | "private":{ 31 | "type":"boolean", 32 | "index":"not_analyzed" 33 | }, 34 | "creation_date":{ 35 | "type":"date" 36 | }, 37 | "subscribers":{ 38 | "type":"long", 39 | "index":"not_analyzed" 40 | }, 41 | "user_ranking":{ 42 | "type":"double", 43 | "index":"not_analyzed" 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/pollinghistory.json: -------------------------------------------------------------------------------- 1 | { 2 | "pollinghistory" : { 3 | "_source" : { "enabled" : true }, 4 | "dynamic": false, 5 | "properties" : { 6 | "history":{ 7 | "type" : "object" 8 | } 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/requests.json: -------------------------------------------------------------------------------- 1 | { 2 | "requests" : { 3 | "_source" : { "enabled" : true }, 4 | "_timestamp" : { "enabled" : true }, 5 | "_ttl" : { "enabled" : true, "default" : "1d"}, 6 | "dynamic" : false, 7 | 8 | "properties" : { 9 | "user_id" : { 10 | "type" : "string", 11 | "index" : "not_analyzed" 12 | }, 13 | "number" : { 14 | "type" : "integer", 15 | "index" : "not_analyzed" 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/resource.json: -------------------------------------------------------------------------------- 1 | { 2 | "resource" : { 3 | "_timestamp" : { "enabled" : true, "store" : true}, 4 | "dynamic": false, 5 | "properties" : { 6 | "name" : { 7 | "type" : "string", 8 | "index" : "not_analyzed" 9 | }, 10 | "description" : { 11 | "type" : "string", 12 | "index" : "not_analyzed" 13 | }, 14 | "manufacturer" : { 15 | "type" : "string", 16 | "index" : "not_analyzed" 17 | }, 18 | "model" : { 19 | "type" : "string", 20 | "index" : "not_analyzed" 21 | }, 22 | "streams_suggest" : { 23 | "properties" : { 24 | "name" : { 25 | "type" : "string", 26 | "index" : "not_analyzed" 27 | }, 28 | "accuracy" : { 29 | "type" : "float" 30 | }, 31 | "description" : { 32 | "type" : "string", 33 | "index" : "no" 34 | }, 35 | "min_val" : { 36 | "type" : "float" 37 | }, 38 | "max_val" : { 39 | "type" : "float" 40 | }, 41 | "polling" : { 42 | "type" : "boolean" 43 | }, 44 | "polling_freq" : { 45 | "type" : "long" 46 | }, 47 | "type" : { 48 | "type" : "string", 49 | "index" : "not_analyzed" 50 | }, 51 | "tags" : { 52 | "type" : "string", 53 | "index" : "analyzed" 54 | }, 55 | "unit" : { 56 | "type" : "string", 57 | "index" : "not_analyzed" 58 | } 59 | } 60 | } 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/search_query.json: -------------------------------------------------------------------------------- 1 | { 2 | "search_query": { 3 | "properties": { 4 | "search_suggest": { 5 | "type": "completion", 6 | "analyzer": "keyword", 7 | "payloads": false, 8 | "preserve_separators": true, 9 | "preserve_position_increments": true, 10 | "max_input_len": 50 11 | } 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/stream.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "stream" : { 3 | "_source" : { 4 | "enabled" : true 5 | }, 6 | "_timestamp" : { 7 | "enabled" : true 8 | }, 9 | "dynamic": false, 10 | "properties" : { 11 | "name" : { 12 | "type" : "multi_field", 13 | "fields" : { 14 | "name" : { 15 | "type" : "string", 16 | "index" : "analyzed" 17 | }, 18 | "untouched" : { 19 | "type" : "string", 20 | "index" : "not_analyzed" 21 | } 22 | } 23 | }, 24 | "description" : { 25 | "type" : "string", 26 | "index" : "no" 27 | }, 28 | "type" : { 29 | "type" : "string", 30 | "index" : "not_analyzed" 31 | }, 32 | "tags" : { 33 | "type" : "string", 34 | "index" : "analyzed" 35 | }, 36 | "private" : { 37 | "type" : "boolean" 38 | }, 39 | "unit" : { 40 | "type" : "string", 41 | "index" : "not_analyzed" 42 | }, 43 | "accuracy" : { 44 | "type" : "float" 45 | }, 46 | "min_val" : { 47 | "type" : "float" 48 | }, 49 | "max_val" : { 50 | "type" : "float" 51 | }, 52 | "polling" : { 53 | "type" : "boolean" 54 | }, 55 | "parser" : { 56 | "type" : "string", 57 | "index" : "not_analyzed" 58 | }, 59 | "data_type" : { 60 | "type" : "string", 61 | "index" : "not_analyzed" 62 | }, 63 | "uri" : { 64 | "type" : "string", 65 | "index" : "not_analyzed" 66 | }, 67 | "polling_freq" : { 68 | "type" : "long" 69 | }, 70 | "location" : { 71 | "type" : "geo_point", 72 | "lat_lon" : true, 73 | "geohash" : true, 74 | "geohash_prefix" : true, 75 | "normalize" : false, 76 | "validate" : true 77 | }, 78 | // "location" : { 79 | // "properties" : { 80 | // "lon" : { 81 | // "type" : "float" 82 | // }, 83 | // "lat" : { 84 | // "type" : "float" 85 | // } 86 | // } 87 | // }, 88 | "resource" : { 89 | "properties" : { 90 | "resource_type" : { 91 | "type" : "string", 92 | "index" : "not_analyzed" 93 | }, 94 | "uuid" : { 95 | "type" : "string", 96 | "index" : "no" 97 | } 98 | } 99 | }, 100 | "quality" : { 101 | "type" : "float" 102 | }, 103 | "active" : { 104 | "type" : "boolean" 105 | }, 106 | "user_ranking" : { 107 | "properties":{ 108 | "average":{ 109 | "type" : "float" 110 | }, 111 | "nr_rankings":{ 112 | "type": "long" 113 | } 114 | } 115 | }, 116 | "nr_subscribers" : { 117 | "type" : "long" 118 | }, 119 | "subscribers" : { 120 | "properties" : { 121 | "user_id" : { 122 | "type" : "string" 123 | } 124 | } 125 | }, 126 | "last_updated" : { 127 | "type" : "date" 128 | }, 129 | "creation_date" : { 130 | "type" : "date" 131 | }, 132 | "history_size" : { 133 | "type" : "long" 134 | }, 135 | "user_id" : { 136 | "type" : "string", 137 | "index" : "not_analyzed" 138 | } 139 | } 140 | } 141 | } 142 | 143 | 144 | 145 | 146 | 147 | 148 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/suggestion.json: -------------------------------------------------------------------------------- 1 | { 2 | "suggestion" : { 3 | "properties" : { 4 | "resource_id" : { 5 | "type" : "string", 6 | "index" : "not_analyzed" 7 | }, 8 | "suggest" : { "type" : "completion", 9 | "index_analyzer" : "model_analyzer", 10 | "search_analyzer" : "model_analyzer", 11 | "payloads" : true 12 | } 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/token.json: -------------------------------------------------------------------------------- 1 | { 2 | "token" : { 3 | "_source" : { 4 | "enabled" : true 5 | }, 6 | "_timestamp" : { 7 | "enabled" : true 8 | }, 9 | "dynamic": false, 10 | "properties" : { 11 | 
"access_token" : { 12 | "type" : "string", 13 | "index" : "not_analyzed" 14 | }, 15 | "refresh_token" : { 16 | "type" : "string", 17 | "index" : "not_analyzed" 18 | }, 19 | "issued_at" : { 20 | "type" : "string", 21 | "index" : "no" 22 | }, 23 | "expires_in" : { 24 | "type" : "string", 25 | "index" : "no" 26 | }, 27 | "user_id" : { 28 | "type" : "string", 29 | "index" : "not_analyzed" 30 | } 31 | } 32 | } 33 | } 34 | 35 | 36 | 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/trigger.json: -------------------------------------------------------------------------------- 1 | { 2 | "trigger" : { 3 | "_timestamp" : { "enabled" : true, "store" : true}, 4 | "properties" : { 5 | "function" : { 6 | "type" : "string", 7 | "index" : "not_analyzed" 8 | }, 9 | "outputlist" : { 10 | "properties" : { 11 | "input" : {"type" : "float"}, 12 | "output" : {"properties" : { 13 | "output_id" : {"type" : "string"}, 14 | "output_type" : {"type" : "string"} 15 | } 16 | } 17 | } 18 | }, 19 | "streams" : { 20 | "type" : "string", 21 | "index" : "analyzed" 22 | }, 23 | "vstreams" : { 24 | "type" : "string", 25 | "index" : "analyzed" 26 | } 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/user.json: -------------------------------------------------------------------------------- 1 | { 2 | "user" : { 3 | "_timestamp" : { "enabled" : true, "store" : true}, 4 | "_source" : {"excludes" : ["password"]}, 5 | "properties" : { 6 | "username" : { 7 | "type" : "string", 8 | "index" : "not_analyzed" 9 | }, 10 | "rankings" : { 11 | "properties" : { 12 | "stream_id" : {"type" : "string"}, 13 | "rank" : {"type" : "float"} 14 | } 15 | }, 16 | "triggers" : { 17 | "properties" : { 18 | "function" : {"type" : "string"}, 19 | "input" : {"type" : "float"}, 20 | "streams" : {"type" : "string"} 21 | } 22 | }, 23 | "notifications" : { 24 | "type" : "object" 25 | }, 26 | "email" : { 27 | "type" : "string", 28 | "index" : "no" 29 | }, 30 | "password" : { 31 | "type" : "string", 32 | "index" : "no", 33 | "store" : "yes" 34 | }, 35 | "firstname" : { 36 | "type" : "string" 37 | }, 38 | "lastname" : { 39 | "type" : "string" 40 | }, 41 | "description" : { 42 | "type" : "string" 43 | }, 44 | "subscriptions" : { 45 | "properties" : { 46 | "stream_id" : { 47 | "type" : "string", 48 | "index" : "not_analyzed", 49 | "store" : "no" 50 | } 51 | } 52 | }, 53 | "private" : { 54 | "type":"boolean" 55 | } 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/virtual_stream.json: -------------------------------------------------------------------------------- 1 | { 2 | "virtual_stream" : { 3 | 4 | "_source" : { 5 | "enabled" : true 6 | }, 7 | 8 | "_timestamp" : { 9 | "enabled" : true 10 | }, 11 | "dynamic": false, 12 | 13 | "properties" : { 14 | 15 | "name": { 16 | "type" : "string", 17 | "index" : "analyzed" 18 | }, 19 | 20 | "user_id": { 21 | "type" : "string", 22 | "index" : "not_analyzed" 23 | }, 24 | 25 | "description" : { 26 | "type" : "string", 27 | "index" : "no" 28 | }, 29 | 30 | "tags" : { 31 | "type" : "string", 32 | "index" : "analyzed" 33 | }, 34 | 35 | "group" : { 36 | "type" : "string", 37 | "index" : "not_analyzed" 38 | }, 39 | 40 | "private" : { 41 | "type" : "boolean", 42 | "index" : "not_analyzed" 43 | }, 44 | "user_ranking" : { 45 | "properties":{ 46 | "average":{ 47 | "type" : "float" 
48 | }, 49 | "nr_rankings":{ 50 | "type": "long" 51 | } 52 | } 53 | }, 54 | "nr_subscribers" : { 55 | "type" : "long" 56 | }, 57 | "subscribers" : { 58 | "properties" : { 59 | "user_id" : { 60 | "type" : "string" 61 | } 62 | } 63 | }, 64 | "history_size" : { 65 | "type" : "long", 66 | "index" : "not_analyzed" 67 | }, 68 | 69 | "last_updated" : { 70 | "type" : "date", 71 | "index" : "not_analyzed" 72 | }, 73 | 74 | "creation_date" : { 75 | "type" : "date", 76 | "index" : "no" 77 | }, 78 | 79 | "streams_involved" : { 80 | "type" : "string", 81 | "index" : "not_analyzed" 82 | }, 83 | 84 | "function" : { 85 | "type" : "string", 86 | "index" : "no" 87 | } 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /es_config/mappings/sensorcloud/vsdatapoint.json: -------------------------------------------------------------------------------- 1 | { 2 | "vsdatapoint" : { 3 | "_source" : { "enabled" : true }, 4 | "dynamic": false, 5 | "properties" : { 6 | "stream_id":{ 7 | "type":"string", 8 | "index":"not_analyzed" 9 | }, 10 | "timestamp":{ 11 | "type":"date", 12 | "format":"dateOptionalTime" 13 | }, 14 | "value":{ 15 | "type":"double", 16 | "index":"not_analyzed" 17 | } 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /es_config/templates/sensorcloud.json: -------------------------------------------------------------------------------- 1 | { 2 | "sensorcloud":{ 3 | "template":"sensorcloud", 4 | "settings":{ 5 | "index":{ 6 | "analysis":{ 7 | "filter":{ 8 | }, 9 | "analyzer":{ 10 | "model_analyzer":{ 11 | "filter":[ "standard", 12 | "lowercase", 13 | "asciifolding" 14 | ], 15 | "type":"custom", 16 | "tokenizer":"keyword" 17 | } 18 | } 19 | } 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /include/common.hrl: -------------------------------------------------------------------------------- 1 | %% @doc 2 | %% Author: Gabriel Tholsgård, Li Hao 3 | %% [www.csproj13.student.it.uu.se] 4 | %% == common settings and names include file == 5 | %% Provides definitions and settings for common things 6 | %% 7 | %% @end 8 | 9 | -ifndef(__COMMON_HRL__). 10 | -define(__COMMON_HRL__, 1). 11 | 12 | 13 | %% IP address to Elastic Search server 14 | -ifndef(ES_IP_ADDR). 15 | -define(ES_IP_ADDR, "localhost"). 16 | -endif. 17 | 18 | 19 | %% Port used by Elastic Search server 20 | -ifndef(ES_PORT). 21 | -define(ES_PORT, "9200"). 22 | -endif. 23 | 24 | 25 | %% Index name of Elastic Search 26 | -ifndef(ES_INDEX). 27 | -define(ES_INDEX, "sensorcloud"). 28 | -endif. 29 | 30 | 31 | %% HTTP URL to Elastic Search server (according to the set macros above) 32 | -ifndef(ES_ADDR). 33 | -define(ES_ADDR, "http://" ++ ?ES_IP_ADDR ++ ":" ++ ?ES_PORT ++ "/" ++ ?ES_INDEX). 34 | -endif. 35 | 36 | 37 | %% User Agent of httpc request 38 | -ifndef(UA). 39 | -define(UA, "sensor-cloud:"). 40 | -endif. 41 | 42 | 43 | %% IP address to RabbitMQ server 44 | -define(RMQ_IPADDR, "localhost"). 45 | 46 | 47 | %% Port used by RabbitMQ server 48 | -define(RMQ_PORT, "5672"). 49 | 50 | 51 | %% Address (host:port) to RabbitMQ server 52 | -define(RMQ_ADDR, ?RMQ_IPADDR ++ ":" ++ ?RMQ_PORT). 53 | 54 | -endif.
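%% ---- Illustrative usage sketch (added annotation, not part of the original header) ----
%% Modules in src/ typically build their Elastic Search request URLs from ?ES_ADDR
%% above. A minimal, hypothetical helper using stock httpc (inets must be started);
%% the function name ping_es/0 is an assumption for illustration only:
%%
%%   ping_es() ->
%%       inets:start(),
%%       %% GET http://localhost:9200/sensorcloud/_mapping with the ?UA user agent
%%       httpc:request(get, {?ES_ADDR ++ "/_mapping", [{"user-agent", ?UA}]}, [], []).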
55 | -------------------------------------------------------------------------------- /include/debug.hrl: -------------------------------------------------------------------------------- 1 | %% Author: Tommy Mattsson 2 | %% [www.csproj13.student.it.uu.se] 3 | %% == debug include file == 4 | %% Provides useful macros used for debugging. 5 | %% 6 | %% @end 7 | -define(INFO(X), ?GENERIC("INFO", X)). 8 | 9 | -ifdef(debug). 10 | -define(DEBUG(X), ?GENERIC("DEBUG", X)). 11 | -else. 12 | -define(DEBUG(X), true). 13 | -endif. 14 | 15 | -ifdef(debug). 16 | -define(ERROR(X), ?GENERIC("ERROR", X)). 17 | -else. 18 | -define(ERROR(X), true). 19 | -endif. 20 | 21 | -define(GENERIC(TYPE, X), io:format("*** ~s: {MODULE: ~p}{LINE: ~p}{MSG: ~p} ***~n", [TYPE,?MODULE,?LINE,X])). 22 | 23 | 24 | -------------------------------------------------------------------------------- /include/field_restrictions.hrl: -------------------------------------------------------------------------------- 1 | %% Author: Tomas Sävström , Li Hao 2 | %% [www.csproj13.student.it.uu.se] 3 | %% == api include file == 4 | %% Includes definitions of what fields are accepted and what fields are restricted, 5 | %% and also what index to use in elastic search 6 | %% 7 | %% @end 8 | 9 | %% Index 10 | -define(INDEX, "sensorcloud"). 11 | 12 | %% Fields for streams 13 | -define(RESTRICTED_STREAMS_UPDATE, ["active","quality","user_ranking","subscribers","last_update","creation_date"]). 14 | -define(RESTRICTED_STREAMS_CREATE, ["active","quality","user_ranking","subscribers","nr_subscribers","last_update","creation_date"]). 15 | -define(ACCEPTED_STREAMS_FIELDS, ["user_id","name","description", "type","tags","private","unit","accuracy","min_val","max_val","polling","uri","polling_freq","location","resource","resource.resource_type","resource.uuid","parser","data_type","location.lon","location.lat"]). 16 | 17 | %% Fields for users 18 | -define(RESTRCITEDUPDATEUSERS, ["username", "subscriptions"]). 19 | -define(ACCEPTEDFIELDSUSERS, ["username", "email", "firstname", "lastname", "description", "password", "private", "image_url", "access_token", "refresh_token"]). 20 | 21 | %% Fields for resources 22 | -define(RESTRICTED_RESOURCES_UPDATE, []). 23 | -define(RESTRICTED_RESOURCES_CREATE, ["streams_suggest"]). 24 | -define(ACCEPTED_RESOURCES_FIELDS, ["name","description","model","manufacturer"]). 25 | 26 | %% Fields for data-points 27 | -define(ACCEPTED_DATAPOINTS_FIELDS, ["stream_id","timestamp","value"]). 28 | 29 | %% Fields for virtual streams 30 | -define(ACCEPTED_FIELDS_VSTREAMS_UPDATE, ["user_id","name","description","tags","private"]). 31 | -define(ACCEPTED_FIELDS_VSTREAMS_CREATE, ["user_id","name","description","tags","private","function","streams_involved","creation_date","timestampfrom"]). 32 | -------------------------------------------------------------------------------- /include/json.hrl: -------------------------------------------------------------------------------- 1 | %% Author: Tommy Mattsson 2 | %% [www.csproj13.student.it.uu.se] 3 | %% == json include file == 4 | %% Provides definitions for creating JSON string objects 5 | %% 6 | %% @end 7 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 8 | %%% Type definitions 9 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 10 | %% @type attr() = atom() | string() 11 | -type attr() :: atom() | string(). 12 | %% @type field() = atom() | string() | [atom()] 13 | -type field() :: atom() | string() | [atom()].
14 | %% @type json() = json_string() | mochijson() 15 | -type json() :: json_string() | mochijson(). 16 | %% @type json_string() = string() 17 | -type json_string() :: string(). 18 | %% @type json_input_value() = atom() | binary() | integer() | json() | [json_input_value()] 19 | -type json_input_value() :: atom() | binary() | integer() | json() | [json_input_value()]. 20 | %% @type json_output_value() = boolean() | binary() | integer() | json_string() | [json_output_value()] 21 | -type json_output_value() :: boolean() | binary() | integer() | json_string() | [json_output_value()]. 22 | %% @type mochijson() = tuple() 23 | -type mochijson() :: tuple(). 24 | 25 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 26 | %%% Convenience Macros 27 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 28 | -ifndef(QUOTE). 29 | -define(QUOTE(ARG), "\"" ++ ARG ++ "\""). 30 | -endif. 31 | 32 | -ifndef(LIST). 33 | -define(LIST(ARG), "[" ++ ARG ++ "]"). 34 | -endif. 35 | 36 | -ifndef(TUPLE). 37 | -define(TUPLE(ARG), "{" ++ ARG ++ "}"). 38 | -endif. 39 | 40 | -ifndef(COLON). 41 | -define(COLON, ":"). 42 | -endif. 43 | -------------------------------------------------------------------------------- /include/poller.hrl: -------------------------------------------------------------------------------- 1 | %% Definition of the structure of a poller's information 2 | 3 | -record(pollerInfo, {stream_id, 4 | name, 5 | uri, 6 | frequency, 7 | data_type, 8 | parser, 9 | pid, 10 | timer_ref}). 11 | 12 | 13 | -------------------------------------------------------------------------------- /include/pubsub.hrl: -------------------------------------------------------------------------------- 1 | %% Datapoint erlang representation, that will propagate around in the pub/sub-system. 2 | -record(datapoint, {timestamp, id, value}). 3 | 4 | -ifndef(__PUBSUB_HRL__). 5 | -define(__PUBSUB_HRL__, 1). 6 | 7 | 8 | %% Definition of timeout to wait for new messages, 9 | %% primarily used for virtual streams. 10 | -define(PUB_SUB_TIMEOUT, 1000). 11 | 12 | %% Takes an erlang:localtime() and converts it to the format: 13 | %% YYYY-MM-DDTHH:MM:SS.000 14 | -define(TIME_NOW(TIME), 15 | integer_to_list(element(1, element(1, TIME))) ++ "-" ++ 16 | case element(2, element(1, TIME)) < 10 of 17 | true -> "0" ++ integer_to_list(element(2, element(1, TIME))); 18 | _ -> "" ++ integer_to_list(element(2, element(1, TIME))) 19 | end ++ "-" ++ 20 | case element(3, element(1, TIME)) < 10 of 21 | true -> "0" ++ integer_to_list(element(3, element(1, TIME))); 22 | _ -> "" ++ integer_to_list(element(3, element(1, TIME))) 23 | end ++ 24 | "T" ++ 25 | case element(1, element(2, TIME)) < 10 of 26 | true -> "0" ++ integer_to_list(element(1, element(2, TIME))); 27 | _ -> "" ++ integer_to_list(element(1, element(2, TIME))) 28 | end ++ ":" ++ 29 | case element(2, element(2, TIME)) < 10 of 30 | true -> "0" ++ integer_to_list(element(2, element(2, TIME))); 31 | _ -> "" ++ integer_to_list(element(2, element(2, TIME))) 32 | end ++ ":" ++ 33 | case element(3, element(2, TIME)) < 10 of 34 | true -> "0" ++ integer_to_list(element(3, element(2, TIME))); 35 | _ -> "" ++ integer_to_list(element(3, element(2, TIME))) 36 | end ++ case Num = (element(3, erlang:now()) div 1000) of 37 | X when X < 10 -> 38 | ".00" ++ integer_to_list(Num); 39 | Y when Y < 100 -> 40 | ".0" ++ integer_to_list(Num); 41 | Z -> "." ++ integer_to_list(Z) 42 | end 43 | ). 44 | 45 | 46 | -endif.
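%% ---- Illustrative usage sketch (added annotation, not part of the original header) ----
%% A publisher would typically stamp a #datapoint{} with ?TIME_NOW, which yields a
%% string such as "2013-12-04T17:43:02.000", i.e. the "dateOptionalTime" format used
%% by the datapoint mapping in es_config/mappings/sensorcloud/datapoint.json.
%% StreamId below is a hypothetical bound variable:
%%
%%   DP = #datapoint{timestamp = ?TIME_NOW(erlang:localtime()),
%%                   id = StreamId,
%%                   value = 117.96}.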
47 | -------------------------------------------------------------------------------- /include/rabbit_common/include/gm_specs.hrl: -------------------------------------------------------------------------------- 1 | %% The contents of this file are subject to the Mozilla Public License 2 | %% Version 1.1 (the "License"); you may not use this file except in 3 | %% compliance with the License. You may obtain a copy of the License at 4 | %% http://www.mozilla.org/MPL/ 5 | %% 6 | %% Software distributed under the License is distributed on an "AS IS" 7 | %% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the 8 | %% License for the specific language governing rights and limitations 9 | %% under the License. 10 | %% 11 | %% The Original Code is RabbitMQ. 12 | %% 13 | %% The Initial Developer of the Original Code is GoPivotal, Inc. 14 | %% Copyright (c) 2007-2013 GoPivotal, Inc. All rights reserved. 15 | %% 16 | 17 | -ifdef(use_specs). 18 | 19 | -type(callback_result() :: 'ok' | {'stop', any()} | {'become', atom(), args()}). 20 | -type(args() :: any()). 21 | -type(members() :: [pid()]). 22 | 23 | -spec(joined/2 :: (args(), members()) -> callback_result()). 24 | -spec(members_changed/3 :: (args(), members(), members()) -> callback_result()). 25 | -spec(handle_msg/3 :: (args(), pid(), any()) -> callback_result()). 26 | -spec(terminate/2 :: (args(), term()) -> any()). 27 | 28 | -endif. 29 | -------------------------------------------------------------------------------- /include/rabbit_common/include/rabbit.hrl: -------------------------------------------------------------------------------- 1 | %% The contents of this file are subject to the Mozilla Public License 2 | %% Version 1.1 (the "License"); you may not use this file except in 3 | %% compliance with the License. You may obtain a copy of the License 4 | %% at http://www.mozilla.org/MPL/ 5 | %% 6 | %% Software distributed under the License is distributed on an "AS IS" 7 | %% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See 8 | %% the License for the specific language governing rights and 9 | %% limitations under the License. 10 | %% 11 | %% The Original Code is RabbitMQ. 12 | %% 13 | %% The Initial Developer of the Original Code is GoPivotal, Inc. 14 | %% Copyright (c) 2007-2013 GoPivotal, Inc. All rights reserved. 15 | %% 16 | 17 | -record(user, {username, 18 | tags, 19 | auth_backend, %% Module this user came from 20 | impl %% Scratch space for that module 21 | }). 22 | 23 | -record(internal_user, {username, password_hash, tags}). 24 | -record(permission, {configure, write, read}). 25 | -record(user_vhost, {username, virtual_host}). 26 | -record(user_permission, {user_vhost, permission}). 27 | 28 | -record(vhost, {virtual_host, dummy}). 29 | 30 | -record(content, 31 | {class_id, 32 | properties, %% either 'none', or a decoded record/tuple 33 | properties_bin, %% either 'none', or an encoded properties binary 34 | %% Note: at most one of properties and properties_bin can be 35 | %% 'none' at once. 36 | protocol, %% The protocol under which properties_bin was encoded 37 | payload_fragments_rev %% list of binaries, in reverse order (!) 38 | }). 39 | 40 | -record(resource, {virtual_host, kind, name}). 41 | 42 | -record(exchange, {name, type, durable, auto_delete, internal, arguments, 43 | scratches, policy, decorators}). 44 | -record(exchange_serial, {name, next}). 
45 | 46 | -record(amqqueue, {name, durable, auto_delete, exclusive_owner = none, 47 | arguments, pid, slave_pids, sync_slave_pids, policy, 48 | gm_pids}). 49 | 50 | %% mnesia doesn't like unary records, so we add a dummy 'value' field 51 | -record(route, {binding, value = const}). 52 | -record(reverse_route, {reverse_binding, value = const}). 53 | 54 | -record(binding, {source, key, destination, args = []}). 55 | -record(reverse_binding, {destination, key, source, args = []}). 56 | 57 | -record(topic_trie_node, {trie_node, edge_count, binding_count}). 58 | -record(topic_trie_edge, {trie_edge, node_id}). 59 | -record(topic_trie_binding, {trie_binding, value = const}). 60 | 61 | -record(trie_node, {exchange_name, node_id}). 62 | -record(trie_edge, {exchange_name, node_id, word}). 63 | -record(trie_binding, {exchange_name, node_id, destination}). 64 | 65 | -record(listener, {node, protocol, host, ip_address, port}). 66 | 67 | -record(runtime_parameters, {key, value}). 68 | 69 | -record(basic_message, {exchange_name, routing_keys = [], content, id, 70 | is_persistent}). 71 | 72 | -record(ssl_socket, {tcp, ssl}). 73 | -record(delivery, {mandatory, sender, message, msg_seq_no}). 74 | -record(amqp_error, {name, explanation = "", method = none}). 75 | 76 | -record(event, {type, props, timestamp}). 77 | 78 | -record(message_properties, {expiry, needs_confirming = false}). 79 | 80 | -record(plugin, {name, %% atom() 81 | version, %% string() 82 | description, %% string() 83 | type, %% 'ez' or 'dir' 84 | dependencies, %% [{atom(), string()}] 85 | location}). %% string() 86 | 87 | %%---------------------------------------------------------------------------- 88 | 89 | -define(COPYRIGHT_MESSAGE, "Copyright (C) 2007-2013 GoPivotal, Inc."). 90 | -define(INFORMATION_MESSAGE, "Licensed under the MPL. See http://www.rabbitmq.com/"). 91 | -define(ERTS_MINIMUM, "5.6.3"). 92 | 93 | %% EMPTY_FRAME_SIZE, 8 = 1 + 2 + 4 + 1 94 | %% - 1 byte of frame type 95 | %% - 2 bytes of channel number 96 | %% - 4 bytes of frame payload length 97 | %% - 1 byte of payload trailer FRAME_END byte 98 | %% See rabbit_binary_generator:check_empty_frame_size/0, an assertion 99 | %% called at startup. 100 | -define(EMPTY_FRAME_SIZE, 8). 101 | 102 | -define(MAX_WAIT, 16#ffffffff). 103 | 104 | -define(HIBERNATE_AFTER_MIN, 1000). 105 | -define(DESIRED_HIBERNATE, 10000). 106 | -define(CREDIT_DISC_BOUND, {2000, 500}). 107 | 108 | -define(INVALID_HEADERS_KEY, <<"x-invalid-headers">>). 109 | -define(ROUTING_HEADERS, [<<"CC">>, <<"BCC">>]). 110 | -define(DELETED_HEADER, <<"BCC">>). 111 | -------------------------------------------------------------------------------- /include/rabbit_common/include/rabbit_framing.hrl: -------------------------------------------------------------------------------- 1 | %% Autogenerated code. Do not edit. 2 | %% 3 | %% The contents of this file are subject to the Mozilla Public License 4 | %% Version 1.1 (the "License"); you may not use this file except in 5 | %% compliance with the License. You may obtain a copy of the License 6 | %% at http://www.mozilla.org/MPL/ 7 | %% 8 | %% Software distributed under the License is distributed on an "AS IS" 9 | %% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See 10 | %% the License for the specific language governing rights and 11 | %% limitations under the License. 12 | %% 13 | %% The Original Code is RabbitMQ. 14 | %% 15 | %% The Initial Developer of the Original Code is GoPivotal, Inc. 16 | %% Copyright (c) 2007-2013 GoPivotal, Inc. All rights reserved. 
17 | %% 18 | -define(PROTOCOL_PORT, 5672). 19 | -define(FRAME_METHOD, 1). 20 | -define(FRAME_HEADER, 2). 21 | -define(FRAME_BODY, 3). 22 | -define(FRAME_HEARTBEAT, 8). 23 | -define(FRAME_MIN_SIZE, 4096). 24 | -define(FRAME_END, 206). 25 | -define(REPLY_SUCCESS, 200). 26 | -define(CONTENT_TOO_LARGE, 311). 27 | -define(NO_ROUTE, 312). 28 | -define(NO_CONSUMERS, 313). 29 | -define(ACCESS_REFUSED, 403). 30 | -define(NOT_FOUND, 404). 31 | -define(RESOURCE_LOCKED, 405). 32 | -define(PRECONDITION_FAILED, 406). 33 | -define(CONNECTION_FORCED, 320). 34 | -define(INVALID_PATH, 402). 35 | -define(FRAME_ERROR, 501). 36 | -define(SYNTAX_ERROR, 502). 37 | -define(COMMAND_INVALID, 503). 38 | -define(CHANNEL_ERROR, 504). 39 | -define(UNEXPECTED_FRAME, 505). 40 | -define(RESOURCE_ERROR, 506). 41 | -define(NOT_ALLOWED, 530). 42 | -define(NOT_IMPLEMENTED, 540). 43 | -define(INTERNAL_ERROR, 541). 44 | -define(FRAME_OOB_METHOD, 4). 45 | -define(FRAME_OOB_HEADER, 5). 46 | -define(FRAME_OOB_BODY, 6). 47 | -define(FRAME_TRACE, 7). 48 | -define(NOT_DELIVERED, 310). 49 | %% Method field records. 50 | -record('connection.start', {version_major = 0, version_minor = 9, server_properties, mechanisms = <<"PLAIN">>, locales = <<"en_US">>}). 51 | -record('connection.start_ok', {client_properties, mechanism = <<"PLAIN">>, response, locale = <<"en_US">>}). 52 | -record('connection.secure', {challenge}). 53 | -record('connection.secure_ok', {response}). 54 | -record('connection.tune', {channel_max = 0, frame_max = 0, heartbeat = 0}). 55 | -record('connection.tune_ok', {channel_max = 0, frame_max = 0, heartbeat = 0}). 56 | -record('connection.open', {virtual_host = <<"/">>, capabilities = <<"">>, insist = false}). 57 | -record('connection.open_ok', {known_hosts = <<"">>}). 58 | -record('connection.close', {reply_code, reply_text = <<"">>, class_id, method_id}). 59 | -record('connection.close_ok', {}). 60 | -record('connection.redirect', {host, known_hosts = <<"">>}). 61 | -record('channel.open', {out_of_band = <<"">>}). 62 | -record('channel.open_ok', {channel_id = <<"">>}). 63 | -record('channel.flow', {active}). 64 | -record('channel.flow_ok', {active}). 65 | -record('channel.close', {reply_code, reply_text = <<"">>, class_id, method_id}). 66 | -record('channel.close_ok', {}). 67 | -record('channel.alert', {reply_code, reply_text = <<"">>, details = []}). 68 | -record('access.request', {realm = <<"/data">>, exclusive = false, passive = true, active = true, write = true, read = true}). 69 | -record('access.request_ok', {ticket = 1}). 70 | -record('exchange.declare', {ticket = 0, exchange, type = <<"direct">>, passive = false, durable = false, auto_delete = false, internal = false, nowait = false, arguments = []}). 71 | -record('exchange.declare_ok', {}). 72 | -record('exchange.delete', {ticket = 0, exchange, if_unused = false, nowait = false}). 73 | -record('exchange.delete_ok', {}). 74 | -record('exchange.bind', {ticket = 0, destination, source, routing_key = <<"">>, nowait = false, arguments = []}). 75 | -record('exchange.bind_ok', {}). 76 | -record('exchange.unbind', {ticket = 0, destination, source, routing_key = <<"">>, nowait = false, arguments = []}). 77 | -record('exchange.unbind_ok', {}). 78 | -record('queue.declare', {ticket = 0, queue = <<"">>, passive = false, durable = false, exclusive = false, auto_delete = false, nowait = false, arguments = []}). 79 | -record('queue.declare_ok', {queue, message_count, consumer_count}). 
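%% Illustrative sketch (not part of this autogenerated file): these method
%% records are normally driven through the rabbitmq-erlang-client, which this
%% project pulls in via rebar. The exchange name is a made-up example.
%% amqp_channel:call(Channel, #'exchange.declare'{exchange = <<"topic_stream">>,
%%                                                type = <<"topic">>}),
%% #'queue.declare_ok'{queue = Q} =
%%     amqp_channel:call(Channel, #'queue.declare'{exclusive = true}).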
80 | -record('queue.bind', {ticket = 0, queue = <<"">>, exchange, routing_key = <<"">>, nowait = false, arguments = []}). 81 | -record('queue.bind_ok', {}). 82 | -record('queue.purge', {ticket = 0, queue = <<"">>, nowait = false}). 83 | -record('queue.purge_ok', {message_count}). 84 | -record('queue.delete', {ticket = 0, queue = <<"">>, if_unused = false, if_empty = false, nowait = false}). 85 | -record('queue.delete_ok', {message_count}). 86 | -record('queue.unbind', {ticket = 0, queue = <<"">>, exchange, routing_key = <<"">>, arguments = []}). 87 | -record('queue.unbind_ok', {}). 88 | -record('basic.qos', {prefetch_size = 0, prefetch_count = 0, global = false}). 89 | -record('basic.qos_ok', {}). 90 | -record('basic.consume', {ticket = 0, queue = <<"">>, consumer_tag = <<"">>, no_local = false, no_ack = false, exclusive = false, nowait = false, arguments = []}). 91 | -record('basic.consume_ok', {consumer_tag}). 92 | -record('basic.cancel', {consumer_tag, nowait = false}). 93 | -record('basic.cancel_ok', {consumer_tag}). 94 | -record('basic.publish', {ticket = 0, exchange = <<"">>, routing_key = <<"">>, mandatory = false, immediate = false}). 95 | -record('basic.return', {reply_code, reply_text = <<"">>, exchange, routing_key}). 96 | -record('basic.deliver', {consumer_tag, delivery_tag, redelivered = false, exchange, routing_key}). 97 | -record('basic.get', {ticket = 0, queue = <<"">>, no_ack = false}). 98 | -record('basic.get_ok', {delivery_tag, redelivered = false, exchange, routing_key, message_count}). 99 | -record('basic.get_empty', {cluster_id = <<"">>}). 100 | -record('basic.ack', {delivery_tag = 0, multiple = false}). 101 | -record('basic.reject', {delivery_tag, requeue = true}). 102 | -record('basic.recover_async', {requeue = false}). 103 | -record('basic.recover', {requeue = false}). 104 | -record('basic.recover_ok', {}). 105 | -record('basic.nack', {delivery_tag = 0, multiple = false, requeue = true}). 106 | -record('basic.credit', {consumer_tag = <<"">>, credit, drain}). 107 | -record('basic.credit_ok', {available}). 108 | -record('basic.credit_drained', {consumer_tag = <<"">>, credit_drained}). 109 | -record('tx.select', {}). 110 | -record('tx.select_ok', {}). 111 | -record('tx.commit', {}). 112 | -record('tx.commit_ok', {}). 113 | -record('tx.rollback', {}). 114 | -record('tx.rollback_ok', {}). 115 | -record('confirm.select', {nowait = false}). 116 | -record('confirm.select_ok', {}). 117 | -record('file.qos', {prefetch_size = 0, prefetch_count = 0, global = false}). 118 | -record('file.qos_ok', {}). 119 | -record('file.consume', {ticket = 1, queue = <<"">>, consumer_tag = <<"">>, no_local = false, no_ack = false, exclusive = false, nowait = false}). 120 | -record('file.consume_ok', {consumer_tag}). 121 | -record('file.cancel', {consumer_tag, nowait = false}). 122 | -record('file.cancel_ok', {consumer_tag}). 123 | -record('file.open', {identifier, content_size}). 124 | -record('file.open_ok', {staged_size}). 125 | -record('file.stage', {}). 126 | -record('file.publish', {ticket = 1, exchange = <<"">>, routing_key = <<"">>, mandatory = false, immediate = false, identifier}). 127 | -record('file.return', {reply_code = 200, reply_text = <<"">>, exchange, routing_key}). 128 | -record('file.deliver', {consumer_tag, delivery_tag, redelivered = false, exchange, routing_key, identifier}). 129 | -record('file.ack', {delivery_tag = 0, multiple = false}). 130 | -record('file.reject', {delivery_tag, requeue = true}). 
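%% Illustrative sketch (assumes amqp_channel:subscribe/3 and the #amqp_msg{}
%% record from the rabbitmq-erlang-client dependency): consuming with the
%% basic.* records defined above; handle_payload/1 is a hypothetical callback.
%% amqp_channel:subscribe(Channel, #'basic.consume'{queue = Q, no_ack = true}, self()),
%% receive #'basic.consume_ok'{} -> ok end,
%% receive {#'basic.deliver'{}, #amqp_msg{payload = Payload}} ->
%%     handle_payload(Payload)
%% end.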
131 | -record('stream.qos', {prefetch_size = 0, prefetch_count = 0, consume_rate = 0, global = false}). 132 | -record('stream.qos_ok', {}). 133 | -record('stream.consume', {ticket = 1, queue = <<"">>, consumer_tag = <<"">>, no_local = false, exclusive = false, nowait = false}). 134 | -record('stream.consume_ok', {consumer_tag}). 135 | -record('stream.cancel', {consumer_tag, nowait = false}). 136 | -record('stream.cancel_ok', {consumer_tag}). 137 | -record('stream.publish', {ticket = 1, exchange = <<"">>, routing_key = <<"">>, mandatory = false, immediate = false}). 138 | -record('stream.return', {reply_code = 200, reply_text = <<"">>, exchange, routing_key}). 139 | -record('stream.deliver', {consumer_tag, delivery_tag, exchange, queue}). 140 | -record('dtx.select', {}). 141 | -record('dtx.select_ok', {}). 142 | -record('dtx.start', {dtx_identifier}). 143 | -record('dtx.start_ok', {}). 144 | -record('tunnel.request', {meta_data}). 145 | -record('test.integer', {integer_1, integer_2, integer_3, integer_4, operation}). 146 | -record('test.integer_ok', {result}). 147 | -record('test.string', {string_1, string_2, operation}). 148 | -record('test.string_ok', {result}). 149 | -record('test.table', {table, integer_op, string_op}). 150 | -record('test.table_ok', {integer_result, string_result}). 151 | -record('test.content', {}). 152 | -record('test.content_ok', {content_checksum}). 153 | %% Class property records. 154 | -record('P_connection', {}). 155 | -record('P_channel', {}). 156 | -record('P_access', {}). 157 | -record('P_exchange', {}). 158 | -record('P_queue', {}). 159 | -record('P_basic', {content_type, content_encoding, headers, delivery_mode, priority, correlation_id, reply_to, expiration, message_id, timestamp, type, user_id, app_id, cluster_id}). 160 | -record('P_tx', {}). 161 | -record('P_confirm', {}). 162 | -record('P_file', {content_type, content_encoding, headers, priority, reply_to, message_id, filename, timestamp, cluster_id}). 163 | -record('P_stream', {content_type, content_encoding, headers, priority, timestamp}). 164 | -record('P_dtx', {}). 165 | -record('P_tunnel', {headers, proxy_name, data_name, durable, broadcast}). 166 | -record('P_test', {}). 167 | -------------------------------------------------------------------------------- /include/rabbit_common/include/rabbit_msg_store.hrl: -------------------------------------------------------------------------------- 1 | %% The contents of this file are subject to the Mozilla Public License 2 | %% Version 1.1 (the "License"); you may not use this file except in 3 | %% compliance with the License. You may obtain a copy of the License 4 | %% at http://www.mozilla.org/MPL/ 5 | %% 6 | %% Software distributed under the License is distributed on an "AS IS" 7 | %% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See 8 | %% the License for the specific language governing rights and 9 | %% limitations under the License. 10 | %% 11 | %% The Original Code is RabbitMQ. 12 | %% 13 | %% The Initial Developer of the Original Code is GoPivotal, Inc. 14 | %% Copyright (c) 2007-2013 GoPivotal, Inc. All rights reserved. 15 | %% 16 | 17 | -include("rabbit.hrl"). 18 | 19 | -ifdef(use_specs). 20 | 21 | -type(msg() :: any()). 22 | 23 | -endif. 24 | 25 | -record(msg_location, {msg_id, ref_count, file, offset, total_size}). 
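%% Illustrative sketch (assumes amqp_channel:cast/3 and #amqp_msg{} from the
%% rabbitmq-erlang-client dependency): publishing a datapoint with the
%% #'basic.publish'{} framing record from rabbit_framing.hrl above; the
%% exchange name and payload are made-up examples.
%% amqp_channel:cast(Channel,
%%                   #'basic.publish'{exchange = <<"topic_stream">>,
%%                                    routing_key = <<"stream">>},
%%                   #amqp_msg{payload = <<"{\"value\": 21.5}">>}).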
26 | -------------------------------------------------------------------------------- /include/state.hrl: -------------------------------------------------------------------------------- 1 | %% State record holding the gen_server's information. 2 | 3 | -record(state, {stream_id, 4 | uri, 5 | parser, 6 | data_type, 7 | channel, 8 | exchange, 9 | connection}). -------------------------------------------------------------------------------- /javascripts/package.json: -------------------------------------------------------------------------------- 1 | 2 | { 3 | "name": "required-deps", 4 | "description":"Installs the necessary dependencies for running receive.js", 5 | "dependencies": { 6 | "socket.io": "*", 7 | "rabbit.js": "*" 8 | } 9 | } -------------------------------------------------------------------------------- /javascripts/receive.js: -------------------------------------------------------------------------------- 1 | 2 | // Create context using rabbit.js (cf. ZMQ), 3 | // io and the subscriber socket. 4 | 5 | var port = 8080, 6 | context = require('rabbit.js').createContext(), 7 | io = require('socket.io').listen(port), 8 | url = require('url'); 9 | var streams = new Object(); 10 | process.setMaxListeners(0); 11 | // Limits debug messages that are printed by socket.io 12 | io.set('log level', 1); 13 | 14 | // A websocket is connected (e.g. a browser). 15 | io.sockets.on('connection', function(socket) { 16 | var sub = context.socket('SUB'); 17 | sub.setEncoding('utf8'); 18 | var con_url = (url.parse(socket.handshake.url, true).query.ns).split("/"); 19 | var namespace = con_url[con_url.length - 1]; 20 | if(streams[namespace] == undefined) { 21 | streams[namespace] = 1; 22 | sub.connect(namespace); 23 | console.log("Created "+namespace+" with "+streams[namespace]+" users connected."); 24 | }else { 25 | streams[namespace]++; 26 | console.log("Connected to "+namespace+" with "+streams[namespace]+" users connected."); 27 | } 28 | 29 | io.of('/'+namespace).on('connection', function(sock) { 30 | sub.on('data', function(data) { 31 | sock.send(data); 32 | //console.log(io.sockets.clients().length); 33 | }); 34 | }); 35 | socket.on('disconnect', function() { 36 | streams[namespace]--; 37 | 38 | console.log("Disconnected from "+namespace+" with "+streams[namespace]+" users connected."); 39 | if(streams[namespace] == 0) { 40 | console.log("No user connected to "+namespace); 41 | //streams[namespace] = undefined; 42 | //sub.destroy(); 43 | console.log("NameSpace : "+namespace+" is "+streams[namespace]); 44 | } 45 | }); 46 | }); 47 | -------------------------------------------------------------------------------- /priv/dispatch.conf: -------------------------------------------------------------------------------- 1 | %%-*- mode: erlang -*- 2 | 3 | 4 | %%STREAMS 5 | {["users", 'user', "streams", "_search"], streams, []}. 6 | {["users", 'user', "streams", 'stream'], streams, []}. 7 | {["users", 'user', "streams"], streams, []}. 8 | {["streams",'stream',"pollinghistory"], streams, []}. 9 | {["streams", "_search"], streams, []}. 10 | {["streams", 'stream'], streams, []}. 11 | {["streams"], streams, []}. 12 | 13 | %%RESOURCES 14 | {["resources", 'resourceid'], resources, []}. 15 | {["resources", "_search"], resources, []}. 16 | {["resources"], resources, []}. 17 | 18 | %%USERS 19 | {["users", "_auth"], users, []}. 20 | {["users", "_search"], users, []}. 21 | {["users", 'id'], users, []}. 22 | {["users"], users, []}. 23 | 24 | %%VIRTUAL STREAMS 25 | {["vstreams", 'id'], virtual_streams, []}.
26 | {["vstreams", "_search"], virtual_streams, []}. 27 | {["vstreams"], virtual_streams, []}. 28 | {["users", 'user', "vstreams"], virtual_streams, []}. 29 | 30 | %%GROUPS 31 | {["users", 'user',"groups","_search"], groups, []}. 32 | {["users", 'user',"groups",'group'], groups, []}. 33 | 34 | %% list a group or groups if they are public 35 | {["groups", 'group'], groups, []}. 36 | {["groups"], groups, []}. 37 | 38 | %%SUGGESTIONS 39 | {["suggest",'field','term'], suggest, []}. 40 | {["suggest",'term'], suggest, []}. 41 | {["suggest"], suggest, []}. 42 | 43 | %%TRIGGERS 44 | {['users', 'userid', "streams", 'streamid', "triggers"], triggers, []}. 45 | {['users', 'userid', "vstreams", 'vstreamid', "triggers"], triggers, []}. 46 | {["users", 'userid', "triggers",'action'], triggers, []}. 47 | {["users", 'userid', "triggers"], triggers, []}. 48 | 49 | %%DATA-POINTS 50 | {["streams", 'id', "data", "_search"], datapoints, []}. 51 | {["streams", 'id', "data", "_count"], datapoints, []}. 52 | {["streams", 'id', "data"], datapoints, []}. 53 | {["vstreams", 'id', "data", "_search"], datapoints, []}. 54 | {["vstreams", 'id', "data", "_count"], datapoints, []}. 55 | {["vstreams", 'id', "data"], datapoints, []}. 56 | 57 | %%SEARCH 58 | {["_search"], search, []}. 59 | {["_history"], search, []}. 60 | 61 | %%ANALYSE 62 | {["users", 'userid', "streams", 'streamid', "_analyse"], analyse, []}. 63 | {["streams", 'streamid', "_analyse"], analyse, []}. 64 | {["vstreams", 'vstreamid', "_analyse"], analyse, []}. 65 | 66 | %%RANK 67 | {["streams", 'stream', "_rank"], streams, []}. 68 | 69 | %%SUBSCRIPTION 70 | {["users", 'id', "_unsubscribe"], users, []}. 71 | {["users", 'id', "_subscribe"], users, []}. 72 | 73 | %% Fallback: serve static files from priv/www 74 | {['*'], static_resource, [{root, "priv/www"}]}. 75 | 76 | 77 | 78 | -------------------------------------------------------------------------------- /priv/www/stream.html: -------------------------------------------------------------------------------- [HTML markup was stripped from this file in the dump; the recoverable text is the page title "Awesome Stream Handler" and four forms: "Create a new stream" (Stream name), "Fetch streams", "Delete stream", and "Update stream" (Stream id, Stream description).]
96 | 97 | 98 | -------------------------------------------------------------------------------- /rebar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/rebar -------------------------------------------------------------------------------- /rebar.config: -------------------------------------------------------------------------------- 1 | %%-*- mode: erlang -*- 2 | 3 | 4 | %% Specify library paths 5 | {lib_dirs, ["lib"]}. 6 | 7 | 8 | %% Edoc options 9 | {edoc_opts, [{packages, false},{private, true}]}. 10 | 11 | %% Eunit options 12 | %% Checks coverage of unit tests 13 | {cover_enabled, true}. 14 | {eunit_opts, [verbose, 15 | {report, {eunit_surefire, [{dir, "."}]}} 16 | ]}. 17 | 18 | %% Erlang compiler options 19 | {erl_opts, [debug_info, 20 | verbose, 21 | %% strong_validation, 22 | return, 23 | warn_export_all, 24 | %% This is needed because there are files in src/ that includes a file from here 25 | %% If header files from other libraries needs to be included, then the path to that include 26 | %% included below (as a separate {i, Dir} entry) 27 | {i, "include/"}, 28 | {i, "lib/erlastic_search/include/"}, 29 | {i, "lib/erlson/include/"}, 30 | {i, "lib/rabbitmq-erlang-client/include/"}, 31 | {i, "lib/webmachine/include/"}, 32 | {src_dirs, ["src","test"]}, 33 | {d, debug} 34 | 35 | ]}. 36 | 37 | {deps_dir, ["lib"]}. 38 | {deps, [ 39 | {webmachine, "", {git, "https://github.com/basho/webmachine", e0c1cbe128cd010c9069852f70ff97534349c7f2}}, 40 | {erlastic_search, ".*", {git, "https://github.com/EricssonResearch/erlastic_search.git", {branch, "master"}}}, 41 | {"rabbitmq-server", ".*", {git, "git://github.com/rabbitmq/rabbitmq-server.git", {tag, "rabbitmq_v3_2_4"}}, [raw]}, 42 | {"rabbitmq-codegen", ".*", {git, "git://github.com/rabbitmq/rabbitmq-codegen.git", {tag, "rabbitmq_v3_2_4"}}, [raw]}, 43 | {"rabbitmq-erlang-client", ".*", {git, "git://github.com/rabbitmq/rabbitmq-erlang-client.git", {tag, "rabbitmq_v3_2_4"}}, [raw]}, 44 | {rErlang, ".*", {git, "git://github.com/projectcs13/rErlang.git", {branch, "master"}}, [raw]}, 45 | {erlson, "", {git, "https://github.com/projectcs13/erlson.git", d0f5a48020f3e7dfa73f6b374722577681f4947f}}, 46 | {mochijson2, "", {git, "https://github.com/bjnortier/mochijson2.git", {branch, "master"}}} 47 | ]}. 48 | 49 | {plugins, [erlson_rebar_plugin]}. % for newer rebar 50 | -------------------------------------------------------------------------------- /scripts/boot/iotf-backend: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ### BEGIN INIT INFO 3 | # Provides: 4 | # Required-Start: $remote_fs $syslog 5 | # Required-Stop: $remote_fs $syslog 6 | # Default-Start: 2 3 4 5 7 | # Default-Stop: 0 1 6 8 | # Short-Description: Start daemon at boot time 9 | # Description: Enable service provided by daemon. 
10 | ### END INIT INFO 11 | 12 | dir="/home/ubuntu/iot-framework-engine" 13 | user="iotf-backend" 14 | cmd="make run" 15 | 16 | name=`basename $0` 17 | pid_file="/var/run/$name.pid" 18 | stdout_log="/var/log/$name.log" 19 | stderr_log="/var/log/$name.err" 20 | 21 | get_pid() { 22 | cat "$pid_file" 23 | } 24 | 25 | is_running() { 26 | [ -f "$pid_file" ] && ps `get_pid` > /dev/null 2>&1 27 | } 28 | 29 | case "$1" in 30 | start) 31 | if is_running; then 32 | echo "Already started" 33 | else 34 | echo "Starting $name" 35 | cd "$dir" 36 | echo $PWD 37 | 38 | sudo su ubuntu -c "$cmd" >> "$stdout_log" 2>> "$stderr_log" & 39 | echo $! > "$pid_file" 40 | if ! is_running; then 41 | echo "Unable to start, see $stdout_log and $stderr_log" 42 | exit 1 43 | fi 44 | fi 45 | ;; 46 | stop) 47 | if is_running; then 48 | echo -n "Stopping $name.." 49 | kill `get_pid` 50 | for i in {1..10} 51 | do 52 | if ! is_running; then 53 | break 54 | fi 55 | 56 | echo -n "." 57 | sleep 1 58 | done 59 | echo 60 | 61 | if is_running; then 62 | echo "Not stopped; may still be shutting down or shutdown may have failed" 63 | exit 1 64 | else 65 | echo "Stopped" 66 | if [ -f "$pid_file" ]; then 67 | rm "$pid_file" 68 | fi 69 | fi 70 | else 71 | echo "Not running" 72 | fi 73 | ;; 74 | restart) 75 | $0 stop 76 | if is_running; then 77 | echo "Unable to stop, will not attempt to start" 78 | exit 1 79 | fi 80 | $0 start 81 | ;; 82 | status) 83 | if is_running; then 84 | echo "Running" 85 | else 86 | echo "Stopped" 87 | exit 1 88 | fi 89 | ;; 90 | *) 91 | echo "Usage: $0 {start|stop|restart|status}" 92 | exit 1 93 | ;; 94 | esac 95 | 96 | exit 0 97 | 98 | -------------------------------------------------------------------------------- /scripts/boot/iotf-es: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ### BEGIN INIT INFO 3 | # Provides: 4 | # Required-Start: $remote_fs $syslog 5 | # Required-Stop: $remote_fs $syslog 6 | # Default-Start: 2 3 4 5 7 | # Default-Stop: 0 1 6 8 | # Short-Description: Start daemon at boot time 9 | # Description: Enable service provided by daemon. 10 | ### END INIT INFO 11 | 12 | dir="/home/ubuntu/iot-framework-engine" 13 | user="iotf-es" 14 | cmd="make run_es" 15 | 16 | name=`basename $0` 17 | pid_file="/var/run/$name.pid" 18 | stdout_log="/var/log/$name.log" 19 | stderr_log="/var/log/$name.err" 20 | 21 | get_pid() { 22 | cat "$pid_file" 23 | } 24 | 25 | is_running() { 26 | [ -f "$pid_file" ] && ps `get_pid` > /dev/null 2>&1 27 | } 28 | 29 | case "$1" in 30 | start) 31 | if is_running; then 32 | echo "Already started" 33 | else 34 | echo "Starting $name" 35 | cd "$dir" 36 | # sudo -u "$user" $cmd >> "$stdout_log" 2>> "$stderr_log" & 37 | sudo $cmd >> "$stdout_log" 2>> "$stderr_log" & 38 | echo $! > "$pid_file" 39 | if ! is_running; then 40 | echo "Unable to start, see $stdout_log and $stderr_log" 41 | exit 1 42 | fi 43 | fi 44 | ;; 45 | stop) 46 | if is_running; then 47 | echo -n "Stopping $name.." 48 | kill `get_pid` 49 | for i in {1..10} 50 | do 51 | if ! is_running; then 52 | break 53 | fi 54 | 55 | echo -n "." 
56 | sleep 1 57 | done 58 | echo 59 | 60 | if is_running; then 61 | echo "Not stopped; may still be shutting down or shutdown may have failed" 62 | exit 1 63 | else 64 | echo "Stopped" 65 | if [ -f "$pid_file" ]; then 66 | rm "$pid_file" 67 | fi 68 | fi 69 | else 70 | echo "Not running" 71 | fi 72 | ;; 73 | restart) 74 | $0 stop 75 | if is_running; then 76 | echo "Unable to stop, will not attempt to start" 77 | exit 1 78 | fi 79 | $0 start 80 | ;; 81 | status) 82 | if is_running; then 83 | echo "Running" 84 | else 85 | echo "Stopped" 86 | exit 1 87 | fi 88 | ;; 89 | *) 90 | echo "Usage: $0 {start|stop|restart|status}" 91 | exit 1 92 | ;; 93 | esac 94 | 95 | exit 0 96 | 97 | -------------------------------------------------------------------------------- /scripts/boot/iotf-rmq: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ### BEGIN INIT INFO 3 | # Provides: 4 | # Required-Start: $remote_fs $syslog 5 | # Required-Stop: $remote_fs $syslog 6 | # Default-Start: 2 3 4 5 7 | # Default-Stop: 0 1 6 8 | # Short-Description: Start daemon at boot time 9 | # Description: Enable service provided by daemon. 10 | ### END INIT INFO 11 | 12 | dir="/home/ubuntu/iot-framework-engine" 13 | user="iotf-rmq" 14 | cmd="make run_rabbit" 15 | 16 | name=`basename $0` 17 | pid_file="/var/run/$name.pid" 18 | stdout_log="/var/log/$name.log" 19 | stderr_log="/var/log/$name.err" 20 | 21 | get_pid() { 22 | cat "$pid_file" 23 | } 24 | 25 | is_running() { 26 | [ -f "$pid_file" ] && ps `get_pid` > /dev/null 2>&1 27 | } 28 | 29 | case "$1" in 30 | start) 31 | if is_running; then 32 | echo "Already started" 33 | else 34 | echo "Starting $name" 35 | cd "$dir" 36 | # sudo -u "$user" $cmd >> "$stdout_log" 2>> "$stderr_log" & 37 | sudo $cmd >> "$stdout_log" 2>> "$stderr_log" & 38 | echo $! > "$pid_file" 39 | if ! is_running; then 40 | echo "Unable to start, see $stdout_log and $stderr_log" 41 | exit 1 42 | fi 43 | fi 44 | ;; 45 | stop) 46 | if is_running; then 47 | echo -n "Stopping $name.." 48 | kill `get_pid` 49 | for i in {1..10} 50 | do 51 | if ! is_running; then 52 | break 53 | fi 54 | 55 | echo -n "." 56 | sleep 1 57 | done 58 | echo 59 | 60 | if is_running; then 61 | echo "Not stopped; may still be shutting down or shutdown may have failed" 62 | exit 1 63 | else 64 | echo "Stopped" 65 | if [ -f "$pid_file" ]; then 66 | rm "$pid_file" 67 | fi 68 | fi 69 | else 70 | echo "Not running" 71 | fi 72 | ;; 73 | restart) 74 | $0 stop 75 | if is_running; then 76 | echo "Unable to stop, will not attempt to start" 77 | exit 1 78 | fi 79 | $0 start 80 | ;; 81 | status) 82 | if is_running; then 83 | echo "Running" 84 | else 85 | echo "Stopped" 86 | exit 1 87 | fi 88 | ;; 89 | *) 90 | echo "Usage: $0 {start|stop|restart|status}" 91 | exit 1 92 | ;; 93 | esac 94 | 95 | exit 0 96 | 97 | -------------------------------------------------------------------------------- /scripts/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ### Author : Tommy Mattsson 3 | ### Purpose: Easy installation of needed software for our project that is part of the linux system rather than our own project 4 | 5 | #Break on error 6 | set -e 7 | 8 | 9 | install_boot_script() { # $1 is the script 10 | sudo cp $PWD/scripts/boot/$1 /etc/init.d 11 | if [ $? -ne 0 ]; then 12 | echo "Call the script from the project folder" 13 | else 14 | echo "iotf-rmq... Success!" 
15 | sudo chmod +x /etc/init.d/$1 16 | sudo update-rc.d $1 defaults 17 | fi 18 | } 19 | 20 | echo "#################################################################" 21 | echo "Installing misc dependencies" 22 | echo "#################################################################" 23 | #sudo apt-get install -yq xsltproc software-properties-common python-pip libpython-dev 24 | sudo apt-get install -yq xsltproc python-pip libpython-dev 25 | 26 | echo "#################################################################" 27 | echo "Installing openjdk-7" 28 | echo "#################################################################" 29 | sudo apt-get install -yq openjdk-7-jre-headless libwxgtk2.8-0 30 | 31 | echo "#################################################################" 32 | echo "Installing Erlang 16B1" 33 | echo "#################################################################" 34 | sudo apt-get install -yq erlang 35 | 36 | echo "#################################################################" 37 | echo "Installing Nodejs together with npm" 38 | echo "#################################################################" 39 | sudo apt-get install -yq python-software-properties python g++ make 40 | sudo add-apt-repository ppa:chris-lea/node.js 41 | sudo apt-get update -q 42 | sudo apt-get install -yq nodejs 43 | 44 | echo "#################################################################" 45 | echo "Installing R" 46 | echo "#################################################################" 47 | sudo add-apt-repository "deb http://ftp.sunet.se/pub/lang/CRAN/bin/linux/ubuntu trusty/" 48 | sudo apt-get update -q 49 | sudo apt-get install -yq r-base 50 | 51 | echo "#################################################################" 52 | echo "Installing Python dependencies via pip" 53 | echo "#################################################################" 54 | sudo pip install -r semantic-adapter/pip-freeze.txt 55 | 56 | echo "#################################################################" 57 | echo "Installing boot scripts" 58 | echo "#################################################################" 59 | install_boot_script "iotf-backend" 60 | install_boot_script "iotf-es" 61 | install_boot_script "iotf-rmq" 62 | 63 | -------------------------------------------------------------------------------- /scripts/python/README.md: -------------------------------------------------------------------------------- 1 | ## Resource for polling 2 | 3 | The scripts in cgi-bin act as dummy resources which you can poll. 4 | You start a small webserver from which you can request a 5 | resource JSON-object. 6 | 7 | To use the scripts, you start a CGI HTTP server in Python. 8 | 9 | From the terminal, in this directory, type: 10 | 11 | python -m CGIHTTPServer 8000 12 | 13 | The port number (8000) can be any free port. 14 | You will now have the resources accessible at 15 | localhost:8000/cgi-bin/temperature.py and localhost:8000/cgi-bin/humidity.py 16 | 17 | If you get a restriction error upon a GET request, 18 | make sure the scripts have execute permissions 19 | by typing 20 | 21 | chmod 755 cgi-bin/temperature.py cgi-bin/humidity.py 22 | 23 | ## Self-posting stream 24 | 25 | The script post_avg.py reads the average load from the file "/proc/loadavg" and sends the value to the specified
the script is called using: 27 | 28 | python post_avg.py 29 | 30 | Example use 31 | 32 | python post_avg.py asd21 30 http://localhost:8000 33 | 34 | the above example will post '{"value": }' every 30 seconds to http://localhost:8000/streams/asd21/data 35 | 36 | The script posts using curl and thus requires curl to be installed. 37 | 38 | ## Post streams to localhost:8000 39 | 40 | The script poststreams.py will post streams in a file to localhost:8000/streams, the example files are 41 | weatherstreams, financialstreams and gamestreams. These files contains the JSON objects of a number of streams 42 | and they have the user_id 'andreas'. Either make sure that the user 'andreas' exists or change the user_ids in 43 | the files. 44 | 45 | If you want to make your own streamfiles, make sure that each stream object is contained in ONE line and that 46 | there are no empty lines between objects. Also make sure that the file does not contain trailing endlines or 47 | whitespaces. 48 | 49 | Example usages: 50 | python poststreams.py weatherstreams 51 | 52 | python poststreams.py financialstreams 53 | 54 | python poststreams.py gamestreams 55 | 56 | 57 | -------------------------------------------------------------------------------- /scripts/python/cgi-bin/humidity.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import datetime 3 | import random 4 | timestamp = datetime.datetime.now() 5 | humidity = random.random() 6 | print "Content-Type: application/json" 7 | print 8 | print """\ 9 | {"resource": "polling-resource", 10 | "streams": 11 | { 12 | "humidity": {"value": %f, "timestamp": "%s"} 13 | } 14 | } 15 | """ % (humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S")) 16 | -------------------------------------------------------------------------------- /scripts/python/cgi-bin/temperature.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import datetime 3 | import random 4 | timestamp = datetime.datetime.now() 5 | temperature = random.random() * 100 6 | print "Content-Type: application/json" 7 | print 8 | print """\ 9 | {"resource": "polling-resource", 10 | "streams": 11 | { 12 | "temperature": {"value": %f, "timestamp": "%s"} 13 | } 14 | } 15 | """ % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S")) 16 | -------------------------------------------------------------------------------- /scripts/python/financialstreams: -------------------------------------------------------------------------------- 1 | {"name":"EURO vs SEK","description":"This stream displays the value of one EURO in SEK","type":"currency","location":"0,0","tags":"financial, economic, currency, finance, money, sek, euro, eur","private":false,"unit":"SEK","accuracy":0.001,"min_val":0,"max_val":10000,"polling":true,"data_type":"application/json","parser":"rate","uri":"http://rate-exchange.appspot.com/currency?from=EUR&to=SEK","polling_freq":1800,"user_id":"andreas"} 2 | {"name":"USD vs SEK","description":"This stream displays the value of one US dollar in SEK","type":"currency","location":"0,0","tags":"financial, economic, currency, finance, money, sek, USD, dollar","private":false,"unit":"SEK","accuracy":0.001,"min_val":0,"max_val":10000,"polling":true,"data_type":"application/json","parser":"rate","uri":"http://rate-exchange.appspot.com/currency?from=USD&to=SEK","polling_freq":1800,"user_id":"andreas"} 3 | {"name":"GBP vs SEK","description":"This stream displays the value of one Brittish pound in 
SEK","type":"currency","location":"0,0","tags":"financial, economic, currency, finance, money, sek, GBP, pound","private":false,"unit":"SEK","accuracy":0.001,"min_val":0,"max_val":10000,"polling":true,"data_type":"application/json","parser":"rate","uri":"http://rate-exchange.appspot.com/currency?from=GBP&to=SEK","polling_freq":1800,"user_id":"andreas"} 4 | {"name":"Bitcoin vs SEK","description":"This stream displays the value of one bitcoin in SEK","type":"currency","location":"0,0","tags":"financial, economic, currency, finance, money, sek, bitcoin","private":false,"unit":"SEK","accuracy":0.001,"min_val":0,"max_val":10000000,"polling":true,"data_type":"application/json","parser":"SEK.24h","uri":"http://api.bitcoincharts.com/v1/weighted_prices.json","polling_freq":86400,"user_id":"andreas"} 5 | {"name":"Bitcoin vs USD","description":"This stream displays the value of one bitcoin in USD","type":"currency","location":"0,0","tags":"financial, economic, currency, finance, money, USD, dollar, bitcoin","private":false,"unit":"USD","accuracy":0.001,"min_val":0,"max_val":10000000,"polling":true,"data_type":"application/json","parser":"USD.24h","uri":"http://api.bitcoincharts.com/v1/weighted_prices.json","polling_freq":86400,"user_id":"andreas"} 6 | -------------------------------------------------------------------------------- /scripts/python/gamestreams: -------------------------------------------------------------------------------- 1 | {"name":"Dota 2: Players Online","description":"This stream displays the number of players currently online in Dota 2","type":"players","location":"0,0","tags":"Dota2,players,online,steam,website","private":false,"unit":"Players","accuracy":1,"min_val":0,"max_val":100000000,"polling":true,"data_type":"application/json","parser":"response.player_count","uri":"http://api.steampowered.com/ISteamUserStats/GetNumberOfCurrentPlayers/v1?appid=570","polling_freq":300,"resource":{"resource_type":"Website","uuid":"http://api.steampowered.com"},"user_id":"andreas"} 2 | {"name":"Counterstrike: Players Online","description":"This stream displays the number of players currently online playing Counterstrike","type":"players","location":"0,0","tags":"CS,Counterstrike,players,online,steam,website","private":false,"unit":"Players","accuracy":1,"min_val":0,"max_val":100000000,"polling":true,"data_type":"application/json","parser":"response.player_count","uri":"http://api.steampowered.com/ISteamUserStats/GetNumberOfCurrentPlayers/v1?appid=10","polling_freq":300,"resource":{"resource_type":"Website","uuid":"http://api.steampowered.com"},"user_id":"andreas"} 3 | {"name":"Counterstrike Source : Players Online","description":"This stream displays the number of players currently online playing Counterstrike","type":"player_count","location":"0,0","tags":"CS,Counterstrike,source,players,online,steam,website","private":false,"unit":"Players","accuracy":1,"min_val":0,"max_val":100000000,"polling":true,"data_type":"application/json","parser":"response.player_count","uri":"http://api.steampowered.com/ISteamUserStats/GetNumberOfCurrentPlayers/v1?appid=240","polling_freq":300,"user_id":"andreas"} 4 | -------------------------------------------------------------------------------- /scripts/python/post_avg.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | import sys 4 | import subprocess 5 | 6 | stream_id = str(sys.argv[1]) 7 | interval = int(sys.argv[2]) 8 | base_url = str(sys.argv[3]) 9 | while 1: 10 | time.sleep(interval) 11 
| a = open("/proc/loadavg") 12 | b = a.readline().split(" ") 13 | loadavg = float(b[0])*100 14 | a.close() 15 | value = '{"value": %f}' % loadavg 16 | url = base_url + '/streams/'+ stream_id +'/data' 17 | subprocess.call(['curl', '-XPOST', '-H', 'Content-type: application/json', '-d', value, url]) 18 | 19 | -------------------------------------------------------------------------------- /scripts/python/poststreams.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | import sys 4 | import urllib 5 | import urllib2 6 | 7 | filename = str(sys.argv[1]) 8 | f = open(filename, 'r') 9 | url = "http://localhost:8000/streams" 10 | 11 | for line in f: 12 | req = urllib2.Request(url) 13 | print line 14 | req.add_data(line) 15 | response = urllib2.urlopen(req) 16 | print response.read() 17 | time.sleep(0.1) 18 | 19 | -------------------------------------------------------------------------------- /scripts/sensec.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | dir=`dirname $0` 3 | #Specify project home directory. 4 | HOME_PATH=`cd $dir;cd ..;pwd` 5 | LOG_DIR=priv/logs 6 | LOG_JS_DIR=priv/logs 7 | ES_PORT=9200 8 | MY_PATH=`pwd` 9 | 10 | if [[ ("$1" = "start") || ("$1" = "test_setup") ]]; then 11 | cd $HOME_PATH 12 | echo "starting rabbit" 13 | make run_rabbit & 14 | echo $! >> $HOME_PATH/.temp.log 15 | echo "starting ES" 16 | make run_es & 17 | echo $! >> $HOME_PATH/.temp.log 18 | sleep 3 19 | echo "Starting node.js with receive.js" 20 | if [ -d "$LOG_JS_DIR" ]; then 21 | make run_nodejs > $LOG_JS_DIR/nodejs_log.log & 22 | echo $! >> $HOME_PATH/.temp.log 23 | else 24 | make run_nodejs & 25 | echo $! >> $HOME_PATH/.temp.log 26 | fi 27 | sleep 7 28 | echo "Starting IoT-Framework" 29 | export R_HOME="/usr/lib/R" 30 | sleep 3 31 | curl -XPUT localhost:$ES_PORT/sensorcloud 32 | if [ "$1" = "start" ]; then 33 | if [ -d "$LOG_DIR" ]; then 34 | erl -noshell -pa $HOME_PATH/ebin/ $HOME_PATH/lib/*/ebin/ $HOME_PATH/lib/*/bin/ -boot start_sasl -s reloader -s engine -sname engine -config $HOME_PATH/config/engine.config > $LOG_DIR/sensor-cloud_log.log & 35 | else 36 | erl -noshell -pa $HOME_PATH/ebin/ $HOME_PATH/lib/*/ebin/ $HOME_PATH/lib/*/bin/ -boot start_sasl -s reloader -s engine -sname engine -config $HOME_PATH/config/engine.config & 37 | fi 38 | fi 39 | echo $! >> $HOME_PATH/.temp.log 40 | cd $MY_PATH 41 | echo "Starting Semantic-Adapter" 42 | make run_semantic_adapter & 43 | echo $! >> $HOME_PATH/.temp.log 44 | elif [ "$1" = "stop" ]; then 45 | echo "Closing nodejs and Sensor-Cloud" 46 | while read line 47 | do 48 | kill -- -$(ps opgid= $line | tr -d ' ') 49 | done < $HOME_PATH/.temp.log 50 | rm $HOME_PATH/.temp.log 51 | fi 52 | 53 | 54 | -------------------------------------------------------------------------------- /scripts/sensec_light.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | dir=`dirname $0` 3 | #Specify project home directory. 4 | HOME_PATH=`cd $dir;cd ..;pwd` 5 | LOG_DIR=priv/logs 6 | LOG_JS_DIR=priv/logs 7 | ES_PORT=9200 8 | MY_PATH=`pwd` 9 | 10 | if [[ ("$1" = "start") || ("$1" = "test_setup") ]]; then 11 | cd $HOME_PATH 12 | echo "starting rabbit" 13 | make run_rabbit & 14 | echo $! >> $HOME_PATH/.temp.log 15 | echo "starting ES" 16 | make run_es & 17 | echo $! 
>> $HOME_PATH/.temp.log 18 | sleep 7 19 | echo "Starting IoT-Framework" 20 | export R_HOME="/usr/lib/R" 21 | sleep 3 22 | curl -XPUT localhost:$ES_PORT/sensorcloud 23 | if [ "$1" = "start" ]; then 24 | if [ -d "$LOG_DIR" ]; then 25 | erl -noshell -pa $HOME_PATH/ebin/ $HOME_PATH/lib/*/ebin/ $HOME_PATH/lib/*/bin/ -boot start_sasl -s reloader -s engine -sname engine -config $HOME_PATH/config/engine.config > $LOG_DIR/sensor-cloud_log.log 26 | else 27 | erl -noshell -pa $HOME_PATH/ebin/ $HOME_PATH/lib/*/ebin/ $HOME_PATH/lib/*/bin/ -boot start_sasl -s reloader -s engine -sname engine -config $HOME_PATH/config/engine.config 28 | fi 29 | fi 30 | echo $! >> $HOME_PATH/.temp.log 31 | elif [ "$1" = "stop" ]; then 32 | echo "Closing Sensor-Cloud" 33 | while read line 34 | do 35 | kill -- -$(ps opgid= $line | tr -d ' ') 36 | done < $HOME_PATH/.temp.log 37 | rm $HOME_PATH/.temp.log 38 | fi 39 | 40 | -------------------------------------------------------------------------------- /scripts/travis-elasticsearch.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | is_elasticsearch_up(){ 4 | http_code=`echo $(curl -s -o /dev/null -w "%{http_code}" "http://localhost:9200")` 5 | return `test $http_code = "200"` 6 | } 7 | 8 | wait_for_elasticsearch(){ 9 | while ! is_elasticsearch_up; do 10 | sleep 3 11 | done 12 | } 13 | 14 | run() { 15 | echo "Starting elasticsearch ..." 16 | ./lib/elasticsearch*/bin/elasticsearch & 17 | wait_for_elasticsearch 18 | cd ../../ 19 | echo "Started" 20 | } 21 | 22 | run 23 | -------------------------------------------------------------------------------- /semantic-adapter/README.md: -------------------------------------------------------------------------------- 1 | ## Semantic Adapter for IoT Framework 2 | 3 | ### Used API of the IoT Framework 4 | 5 | * Get all streams 6 | 7 | http://axondev.cf.ericsson.net:8000/streams 8 | 9 | * Get a specified stream 10 | 11 | http://axondev.cf.ericsson.net:8000/streams/FFqNysD8Qqe18BNUAEphkA 12 | 13 | * Get all data of the specified stream 14 | 15 | http://axondev.cf.ericsson.net:8000/streams/FFqNysD8Qqe18BNUAEphkA/data 16 | 17 | 18 | ### Preparation 19 | 20 | * Install elasticsearch 21 | 22 | http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/setup-repositories.html 23 | 24 | ### Mapping 25 | 26 | ``` 27 | Stream -> ssn:Sensor 28 | accuracy -> ssn:Accuracy 29 | active -> ??? 30 | creation_date -> ssn:hasDeployment 31 | data_type -> ??? (application/json, maybe not needed) 32 | description -> foaf:depiction 33 | history_size -> ??? 34 | last_updated -> ssn:Observation 35 | location -> foaf:based_near 36 | # we need to add two properties to express 37 | # min/max value because SSN doesn't define it. 38 | # http://www.w3.org/2005/Incubator/ssn/wiki/SSN_Smart_product 39 | max_val, min_val -> ssn:MeasurementRange 40 | name -> foaf:name 41 | nr_subscribers -> ??? 42 | parser -> ??? 43 | polling -> ??? 44 | polling_freq -> ssn:Frequency 45 | private -> ??? 46 | quality -> DUL:Quality 47 | resource -> ??? 48 | subscribers -> ??? 49 | tags -> ??? 50 | # shall we model all observations? 51 | type -> ssn:observes 52 | unit -> UnitOfMeasure 53 | uri -> foaf:homepage 54 | user_id -> foaf:maker 55 | user_ranking -> ??? 56 | ``` 57 | 58 | ``` 59 | Virtual Stream -> ssn:Sensor 60 | creation_date -> ssn:hasDeployment 61 | description -> foaf:depiction 62 | function -> ??? 63 | group -> ??? 64 | history_size -> ???
65 | last_updated -> ssn:Observation 66 | name -> foaf:name 67 | nr_subscribers -> ??? 68 | private -> ??? 69 | streams_involved -> foaf:member 70 | subscribers -> ??? 71 | tags -> ??? 72 | user_id -> foaf:maker 73 | user_ranking -> ??? 74 | ``` 75 | 76 | ``` 77 | Data Point -> ssn:SensorOutput 78 | stream_id -> ssn:isProducedBy 79 | timestamp -> hasEventDate 80 | value -> ssn:ObservationValue 81 | ``` 82 | 83 | ``` 84 | Virtual Stream Data Point -> ssn:SensorOutput 85 | stream_id -> ssn:isProducedBy 86 | timestamp -> hasEventDate 87 | value -> ssn:ObservationValue 88 | ``` -------------------------------------------------------------------------------- /semantic-adapter/app/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'ehonlia' 2 | 3 | import inspect 4 | from flask import Flask, url_for, render_template 5 | 6 | from app.semantic_adapter import semantic_adapter 7 | from app.semantic_adapter_citypulse import semantic_adapter_citypulse 8 | 9 | app = Flask(__name__) 10 | app.register_blueprint(semantic_adapter) 11 | app.register_blueprint(semantic_adapter_citypulse, url_prefix='/citypulse') 12 | 13 | 14 | @app.route('/') 15 | def site_map(): 16 | links = [] 17 | for rule in app.url_map.iter_rules(): 18 | # not static and not site_map 19 | if rule.endpoint not in ['static', inspect.stack()[0][3]] and rule.endpoint.find('citypulse') == -1: 20 | if rule.endpoint != "semantic_adapter.datapoints2": 21 | url = url_for(rule.endpoint, **(rule.defaults or {})) 22 | links.append((url, rule.endpoint)) 23 | return render_template('index.html', links=links) 24 | -------------------------------------------------------------------------------- /semantic-adapter/app/mimetype.py: -------------------------------------------------------------------------------- 1 | __author__ = 'ehonlia' 2 | 3 | JSON = 'application/json' 4 | 5 | mimetype_map = { 6 | 'xml': 'text/xml', 7 | 'json-ld': JSON, 8 | 'n3': 'text/plain', 9 | None: 'text/xml' 10 | } 11 | 12 | 13 | def correct_format(output_format): 14 | if output_format is None: 15 | corrected_output_format = 'n3' 16 | else: 17 | corrected_output_format = output_format 18 | 19 | return corrected_output_format 20 | -------------------------------------------------------------------------------- /semantic-adapter/app/semantic_adapter.py: -------------------------------------------------------------------------------- 1 | __author__ = 'ehonlia' 2 | 3 | import json 4 | 5 | from flask import Response, request, jsonify, Blueprint 6 | 7 | from app.mimetype import JSON, mimetype_map, correct_format 8 | from lib import semantics 9 | 10 | 11 | semantic_adapter = Blueprint('semantic_adapter', __name__, template_folder='../templates') 12 | 13 | 14 | @semantic_adapter.route('/streams') 15 | def streams(): 16 | output_format = correct_format(request.args.get('format')) 17 | combined = request.args.get('combined') 18 | 19 | if combined is None: 20 | return Response(json.dumps(semantics.semantic_streams(output_format, request.args)), mimetype=JSON) 21 | else: 22 | return Response(semantics.semantic_streams_combined(output_format, request.args), 23 | mimetype=mimetype_map[output_format]) 24 | 25 | 26 | @semantic_adapter.route('/virtual_streams') 27 | def virtual_streams(): 28 | output_format = correct_format(request.args.get('format')) 29 | combined = request.args.get('combined') 30 | 31 | if combined is None: 32 | return Response(json.dumps(semantics.semantic_virtual_streams(output_format, request.args)), mimetype=JSON) 
33 | else: 34 | return Response(semantics.semantic_virtual_streams_combined(output_format, params=request.args), 35 | mimetype=mimetype_map[output_format]) 36 | 37 | 38 | @semantic_adapter.route('/datapoints') 39 | def datapoints(): 40 | output_format = correct_format(request.args.get('format')) 41 | combined = request.args.get('combined') 42 | 43 | if combined is None: 44 | return Response(json.dumps(semantics.semantic_datapoints(output_format, params=request.args)), mimetype=JSON) 45 | else: 46 | return Response(semantics.semantic_datapoints_combined(output_format, params=request.args), 47 | mimetype=mimetype_map[output_format]) 48 | 49 | 50 | 51 | @semantic_adapter.route('/datapoints/<id>') 52 | def datapoints2(id): 53 | output_format = correct_format(request.args.get('format')) 54 | 55 | return Response(semantics.semantic_datapoints_citypulse(id, output_format, params=request.args), 56 | mimetype=mimetype_map[output_format]) 57 | 58 | 59 | @semantic_adapter.route('/vsdatapoints') 60 | def vsdatapoints(): 61 | output_format = correct_format(request.args.get('format')) 62 | combined = request.args.get('combined') 63 | 64 | if combined is None: 65 | return Response(json.dumps(semantics.semantic_datapoints(output_format, 'vsdatapoint', request.args)), 66 | mimetype=JSON) 67 | else: 68 | return Response(semantics.semantic_datapoints_combined(output_format, 'vsdatapoint', request.args), 69 | mimetype=mimetype_map[output_format]) 70 | 71 | 72 | @semantic_adapter.route('/mapping') 73 | def mapping(): 74 | return jsonify(semantics.mapping()) 75 | -------------------------------------------------------------------------------- /semantic-adapter/app/semantic_adapter_citypulse.py: -------------------------------------------------------------------------------- 1 | __author__ = 'ehonlia' 2 | 3 | import json 4 | 5 | from flask import Blueprint, request, Response 6 | 7 | from app.mimetype import correct_format, mimetype_map 8 | from lib import semantics 9 | 10 | semantic_adapter_citypulse = Blueprint('semantic_adapter_citypulse', __name__, template_folder='../templates') 11 | 12 | 13 | @semantic_adapter_citypulse.route('/datapoints/<id>') 14 | def datapoints(id): 15 | output_format = correct_format(request.args.get('format')) 16 | 17 | return Response(semantics.semantic_datapoints_citypulse(id, output_format, params=request.args), 18 | mimetype=mimetype_map[output_format]) 19 | -------------------------------------------------------------------------------- /semantic-adapter/gunicorn_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | gunicorn -w 5 -b 0.0.0.0:5000 app:app 4 | -------------------------------------------------------------------------------- /semantic-adapter/lib/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'ehonlia' 2 | -------------------------------------------------------------------------------- /semantic-adapter/lib/broker.py: -------------------------------------------------------------------------------- 1 | __author__ = 'ehonlia' 2 | 3 | import pika 4 | import json 5 | import logging 6 | 7 | import semantics 8 | from constants import ID, METADATA 9 | 10 | HOST = 'honnix-ws' 11 | EXCHANGE_TYPE = 'topic' 12 | STREAM_EXCHANGE = 'topic_stream' 13 | VIRTUAL_STREAM_EXCHANGE = 'topic_virtual_stream' 14 | SEMANTIC_STREAM_EXCHANGE = 'topic_semantic_stream' 15 | SEMANTIC_VIRTUAL_STREAM_EXCHANGE = 'topic_semantic_virtual_stream' 16 | STREAM_ROUTING_KEY = 'stream' 17 |
17 | VIRTUAL_STREAM_ROUTING_KEY = 'virtual_stream'
18 | SEMANTIC_STREAM_ROUTING_KEY = 'semantic_stream'
19 | SEMANTIC_VIRTUAL_STREAM_ROUTING_KEY = 'semantic_virtual_stream'
20 |
21 |
22 | def __callback_stream(ch, method, properties, body):
23 | print " [x] %r:%r" % (method.routing_key, body,)
24 |
25 | stream_id, semantic_stream = semantics.semantic_stream(json.loads(body))
26 | print semantic_stream
27 |
28 | connection = pika.BlockingConnection(pika.ConnectionParameters(host=HOST))
29 | channel = connection.channel()
30 | channel.exchange_declare(exchange=SEMANTIC_STREAM_EXCHANGE, type=EXCHANGE_TYPE)
31 | channel.basic_publish(exchange=SEMANTIC_STREAM_EXCHANGE, routing_key=SEMANTIC_STREAM_ROUTING_KEY,
32 | body=json.dumps({ID: stream_id, METADATA: semantic_stream}))
33 | connection.close()
34 |
35 |
36 | def __callback_virtual_stream(ch, method, properties, body):
37 | print " [x] %r:%r" % (method.routing_key, body,)
38 |
39 | stream_id, semantic_virtual_stream = semantics.semantic_virtual_stream(json.loads(body))
40 |
41 | connection = pika.BlockingConnection(pika.ConnectionParameters(host=HOST))
42 | channel = connection.channel()
43 | channel.exchange_declare(exchange=SEMANTIC_VIRTUAL_STREAM_EXCHANGE, type=EXCHANGE_TYPE)
44 | channel.basic_publish(exchange=SEMANTIC_VIRTUAL_STREAM_EXCHANGE, routing_key=SEMANTIC_VIRTUAL_STREAM_ROUTING_KEY,
45 | body=json.dumps({ID: stream_id, METADATA: semantic_virtual_stream}))
46 | connection.close()
47 |
48 |
49 | def subscribe_to_stream_update():
50 | connect = pika.BlockingConnection(pika.ConnectionParameters(host=HOST))
51 | channel = connect.channel()
52 |
53 | channel.exchange_declare(exchange=STREAM_EXCHANGE, type=EXCHANGE_TYPE)
54 |
55 | result = channel.queue_declare(exclusive=True)
56 | queue_name = result.method.queue
57 |
58 | channel.queue_bind(exchange=STREAM_EXCHANGE, queue=queue_name, routing_key=STREAM_ROUTING_KEY)
59 |
60 | channel.basic_consume(__callback_stream, queue=queue_name, no_ack=True)
61 | channel.start_consuming()
62 |
63 |
64 | def subscribe_to_virtual_stream_update():
65 | connect = pika.BlockingConnection(pika.ConnectionParameters(host=HOST))
66 | channel = connect.channel()
67 |
68 | channel.exchange_declare(exchange=VIRTUAL_STREAM_EXCHANGE, type=EXCHANGE_TYPE)
69 |
70 | result = channel.queue_declare(exclusive=True)
71 | queue_name = result.method.queue
72 |
73 | channel.queue_bind(exchange=VIRTUAL_STREAM_EXCHANGE, queue=queue_name, routing_key=VIRTUAL_STREAM_ROUTING_KEY)
74 |
75 | channel.basic_consume(__callback_virtual_stream, queue=queue_name, no_ack=True)
76 | channel.start_consuming()
77 |
78 |
79 | if __name__ == '__main__':
80 | logging.basicConfig()
81 | subscribe_to_stream_update()
82 | subscribe_to_virtual_stream_update()  # NOTE: never reached; start_consuming() above blocks
83 |
--------------------------------------------------------------------------------
/semantic-adapter/lib/constants.py:
--------------------------------------------------------------------------------
1 | __author__ = 'ehonlia'
2 |
3 | from rdflib import Namespace
4 |
5 | SSN = Namespace('http://purl.oclc.org/NET/ssnx/ssn#')
6 | DUL = Namespace('http://www.loa-cnr.it/ontologies/DUL.owl#')
7 | GEO = Namespace('http://www.w3.org/2003/01/geo/wgs84_pos#')
8 |
9 | SAO = Namespace('http://purl.oclc.org/NET/sao/')
10 | CT = Namespace('http://www.insight-centre.org/citytraffic#')
11 | PROV = Namespace('http://purl.org/NET/provenance.owl#')
12 | TL = Namespace('http://purl.org/NET/c4dm/timeline.owl#')
13 | UCUM = Namespace('http://purl.oclc.org/NET/muo/ucum/')
14 |
15 | ID = '_id'
16 | METADATA = 'metadata'
17 |
--------------------------------------------------------------------------------
/semantic-adapter/lib/util.py:
--------------------------------------------------------------------------------
1 | __author__ = 'ehonlia'
2 |
3 | import re
4 |
5 | # Solr/Lucene special characters: + - ! ( ) { } [ ] ^ " ~ * ? : \
6 | # There are also operators && and ||, but we're just going to escape
7 | # the individual ampersand and pipe chars.
8 | # Also, we're not going to escape backslashes!
9 | # http://lucene.apache.org/java/2_9_1/queryparsersyntax.html#Escaping+Special+Characters
10 | __ESCAPE_CHARS_RE = re.compile(r'(?<!\\)(?P<char>[&|+\-!(){}[\]^"~*?:])')
11 |
12 |
13 | def lucene_escape(value):
14 | r"""Escape un-escaped special characters and return escaped value.
15 |
16 | >>> lucene_escape(r'foo+') == r'foo\+'
17 | True
18 | >>> lucene_escape(r'foo\+') == r'foo\+'
19 | True
20 | >>> lucene_escape(r'foo\\+') == r'foo\\+'
21 | True
22 | """
23 | return __ESCAPE_CHARS_RE.sub(r'\\\g<char>', value)
--------------------------------------------------------------------------------
/semantic-adapter/ontologies/SAO-v04.ttl:
--------------------------------------------------------------------------------
1 | @prefix : <http://purl.oclc.org/NET/sao/> .
2 | @prefix dc: <http://purl.org/dc/terms/> .
3 | @prefix om: .
4 | @prefix tl: <http://purl.org/NET/c4dm/timeline.owl#> .
5 | @prefix owl: <http://www.w3.org/2002/07/owl#> .
6 | @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
7 | @prefix sao: <http://purl.oclc.org/NET/sao/> .
8 | @prefix ssn: <http://purl.oclc.org/NET/ssnx/ssn#> .
9 | @prefix xml: <http://www.w3.org/XML/1998/namespace> .
10 | @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
11 | @prefix foaf: <http://xmlns.com/foaf/0.1/> .
12 | @prefix prov: <http://purl.org/NET/provenance.owl#> .
13 | @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
14 | @prefix DUL: <http://www.loa-cnr.it/ontologies/DUL.owl#> .
15 | @prefix qoi: .
16 | @prefix muo: <http://purl.oclc.org/NET/muo/muo#> .
17 | @prefix geo: <http://www.w3.org/2003/01/geo/wgs84_pos#> .
18 | @base <http://purl.oclc.org/NET/sao/> .
19 |
20 | <http://purl.oclc.org/NET/sao/> rdf:type owl:Ontology ;
21 |
22 | owl:versionIRI .
23 |
24 |
25 | #################################################################
26 | #
27 | # Annotation properties
28 | #
29 | #################################################################
30 |
31 |
32 |
33 | ### http://purl.org/dc/terms/description
34 |
35 | dc:description rdf:type owl:AnnotationProperty .
36 |
37 |
38 |
39 |
40 | #################################################################
41 | #
42 | # Object Properties
43 | #
44 | #################################################################
45 |
46 |
47 | ### http://purl.oclc.org/NET/sao/computeby
48 |
49 | sao:computeby rdf:type owl:ObjectProperty ;
50 |
51 | rdfs:comment "relates a stream data to a computed result" ;
52 |
53 | rdfs:range sao:StreamAnalysis ;
54 |
55 | rdfs:domain sao:StreamData .
56 |
57 |
58 |
59 | ### http://purl.oclc.org/NET/sao/computedfrom
60 |
61 | sao:computedfrom rdf:type owl:ObjectProperty ;
62 |
63 | rdfs:comment "relates a computed result to a stream data" ;
64 |
65 | rdfs:domain sao:StreamAnalysis ;
66 |
67 | rdfs:range sao:StreamData .
68 |
69 |
70 |
71 | ### http://purl.oclc.org/NET/sao/time
72 |
73 | sao:time rdf:type owl:ObjectProperty ;
74 |
75 | rdfs:comment "Relates a segment to the time interval concept in Timeline Ontology" ;
76 |
77 | rdfs:range tl:Interval .
78 |
79 |
80 |
81 | ### http://purl.org/NET/provenance.owl#wasAssociatedWith
82 |
83 | prov:wasAssociatedWith rdf:type owl:ObjectProperty ;
84 |
85 | rdfs:comment "Relates a stream analysis to associated entity" ;
86 |
87 | rdfs:domain sao:StreamAnalysis ;
88 |
89 | rdfs:range DUL:InformationEntity .
90 |
91 |
92 |
93 | ### http://purl.org/NET/provenance.owl#wasDerivedFrom
94 |
95 | prov:wasDerivedFrom rdf:type owl:ObjectProperty ;
96 |
97 | rdfs:comment "Relates a stream data to an observation" ;
98 |
99 | rdfs:range sao:StreamData .
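### Example (not part of the original ontology file): how the object
### properties above are intended to link instance data, using hypothetical
### instance IDs in the sao: namespace:
###
###   sao:analysis1 rdf:type sao:Mean ;
###       sao:computedfrom sao:point1 .
###   sao:point1 rdf:type sao:Point ;
###       sao:computeby sao:analysis1 .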
100 |
101 |
102 | ### http://purl.oclc.org/NET/sao/hasQuantityUnitOfMeasurement
103 |
104 | sao:hasQuantityUnitOfMeasurement rdf:type owl:ObjectProperty ;
105 |
106 | rdfs:comment "Relates a stream data to a quantity unit of measurement" ;
107 |
108 | rdfs:domain sao:StreamData ;
109 |
110 | rdfs:range DUL:InformationEntity .
111 |
112 |
113 | ### http://purl.oclc.org/NET/sao/quality
114 |
115 | sao:quality rdf:type owl:ObjectProperty ;
116 |
117 | rdfs:comment "Relates a stream data to an information entity regarding the quality" ;
118 |
119 | rdfs:domain sao:StreamData ;
120 |
121 | rdfs:range DUL:InformationEntity .
122 |
123 | ### http://purl.oclc.org/NET/sao/hasUnitOfMeasurement
124 |
125 | sao:hasUnitOfMeasurement rdf:type owl:ObjectProperty ;
126 |
127 | rdfs:comment "relates a stream data to a unit of measurement" ;
128 |
129 | rdfs:range muo:UnitOfMeasurement ;
130 |
131 | rdfs:domain sao:StreamData .
132 |
133 |
134 | ### http://purl.oclc.org/NET/sao/value
135 |
136 | sao:value rdf:type owl:ObjectProperty ;
137 |
138 | rdfs:domain sao:StreamData ;
139 |
140 | rdfs:range owl:Thing .
141 |
142 |
143 | ### http://purl.oclc.org/NET/sao/hasLocation
144 |
145 | sao:hasLocation rdf:type owl:ObjectProperty ;
146 |
147 | rdfs:range geo:SpatialThing .
148 |
149 |
150 | #################################################################
151 | #
152 | # Data properties
153 | #
154 | #################################################################
155 |
156 |
157 |
158 | ### http://purl.oclc.org/NET/sao/alphabetsize
159 |
160 | sao:alphabetsize rdf:type owl:DatatypeProperty ;
161 |
162 | rdfs:comment "describes the alphabet size that has been used for a stream analysis technique (e.g. SymbolicAggregateApproximation)" ;
163 |
164 | rdfs:domain sao:SymbolicAggregateApproximation ;
165 |
166 | rdfs:range xsd:int .
167 |
168 |
169 |
170 |
171 |
172 |
173 | ### http://purl.oclc.org/NET/sao/hasURI
174 |
175 | sao:hasURI rdf:type owl:DatatypeProperty ;
176 |
177 | rdfs:range xsd:string .
178 |
179 |
180 |
181 | ### http://purl.oclc.org/NET/sao/samplesize
182 |
183 | sao:samplesize rdf:type owl:DatatypeProperty ;
184 |
185 | rdfs:comment "describes the number of samples that a stream data involves, or used for a stream analysis technique" ;
186 |
187 | rdfs:domain sao:StreamData ;
188 |
189 | rdfs:range xsd:int .
190 |
191 |
192 |
193 | ### http://purl.oclc.org/NET/sao/samplingrate
194 |
195 | sao:samplingrate rdf:type owl:DatatypeProperty ;
196 |
197 | rdfs:domain sao:StreamData ;
198 |
199 | rdfs:range xsd:int .
200 |
201 |
202 |
203 | ### http://purl.oclc.org/NET/sao/segmentsize
204 |
205 | sao:segmentsize rdf:type owl:DatatypeProperty ;
206 |
207 | rdfs:comment "Describes the number of segments that have been used for a stream data/analysis" ;
208 |
209 | rdfs:domain sao:StreamAnalysis ;
210 |
211 | rdfs:range xsd:int .
212 |
213 |
214 |
215 | ### http://purl.oclc.org/NET/sao/stepsize
216 |
217 | sao:stepsize rdf:type owl:DatatypeProperty ;
218 |
219 | rdfs:comment "Describes the step size, in other words the size of overlapping frames that have been used for a stream data/analysis" ;
220 |
221 | rdfs:domain sao:StreamAnalysis ;
222 |
223 | rdfs:range xsd:int .
224 |
225 |
226 |
227 | ### http://purl.oclc.org/NET/sao/hasDataValue
228 |
229 | sao:hasDataValue rdf:type owl:DatatypeProperty;
230 | rdfs:range xsd:any.
231 | 232 | 233 | 234 | 235 | 236 | ################################################################# 237 | # 238 | # Classes 239 | # 240 | ################################################################# 241 | 242 | 243 | 244 | 245 | 246 | ### http://purl.oclc.org/NET/sao/DiscreteCosineTransform 247 | 248 | sao:DiscreteCosineTransform rdf:type owl:Class ; 249 | 250 | rdfs:subClassOf sao:StreamAnalysis . 251 | 252 | 253 | 254 | ### http://purl.oclc.org/NET/sao/DiscreteFourierTransform 255 | 256 | sao:DiscreteFourierTransform rdf:type owl:Class ; 257 | 258 | rdfs:subClassOf sao:StreamAnalysis . 259 | 260 | 261 | 262 | ### http://purl.oclc.org/NET/sao/Mean 263 | 264 | sao:Mean rdf:type owl:Class ; 265 | 266 | rdfs:subClassOf sao:StreamAnalysis ; 267 | 268 | dc:description "Mean calculate the mean of each row of observations" . 269 | 270 | 271 | 272 | ### http://purl.oclc.org/NET/sao/Median 273 | 274 | sao:Median rdf:type owl:Class ; 275 | 276 | rdfs:subClassOf sao:StreamAnalysis ; 277 | 278 | dc:description "Median calculate the median of each row of observations" . 279 | 280 | 281 | 282 | ### http://purl.oclc.org/NET/sao/PiecewiseAggregateApproximation 283 | 284 | sao:PiecewiseAggregateApproximation rdf:type owl:Class ; 285 | 286 | rdfs:subClassOf sao:StreamAnalysis . 287 | 288 | 289 | 290 | ### http://purl.oclc.org/NET/sao/Point 291 | 292 | sao:Point rdf:type owl:Class ; 293 | 294 | rdfs:subClassOf sao:StreamData . 295 | 296 | 297 | 298 | ### http://purl.oclc.org/NET/sao/StreamAnalysis 299 | 300 | sao:StreamAnalysis rdf:type owl:Class ; 301 | 302 | rdfs:subClassOf sao:StreamData . 303 | 304 | 305 | 306 | ### http://purl.oclc.org/NET/sao/StreamData 307 | 308 | sao:StreamData rdf:type owl:Class ; 309 | 310 | rdfs:subClassOf sao:StreamAnalysis, ssn:Observation ; 311 | 312 | rdfs:subClassOf prov:Entity ; 313 | 314 | rdfs:subClassOf [ a owl:Restriction ; 315 | owl:allValuesFrom DUL:InformationEntity ; 316 | owl:onProperty sao:quality 317 | ] . 318 | 319 | 320 | 321 | 322 | ### http://purl.oclc.org/NET/sao/StreamEvent 323 | 324 | sao:StreamEvent rdf:type owl:Class ; 325 | 326 | rdfs:subClassOf prov:Activity . 327 | 328 | 329 | 330 | ### http://purl.oclc.org/NET/sao/StreamSegment 331 | 332 | sao:Segment rdf:type owl:Class ; 333 | 334 | rdfs:subClassOf sao:StreamData . 335 | 336 | 337 | 338 | ### http://purl.oclc.org/NET/sao/SymbolicAggregateApproximation 339 | 340 | sao:SymbolicAggregateApproximation rdf:type owl:Class ; 341 | 342 | rdfs:subClassOf sao:StreamAnalysis . 343 | -------------------------------------------------------------------------------- /semantic-adapter/pip-freeze.txt: -------------------------------------------------------------------------------- 1 | Flask==0.10.1 2 | Jinja2==2.7.2 3 | MarkupSafe==0.19 4 | SPARQLWrapper==1.6.1 5 | Werkzeug==0.9.4 6 | aniso8601==0.82 7 | elasticsearch==1.0.0 8 | gnureadline==6.3.3 9 | gunicorn==19.0.0 10 | html5lib==0.999 11 | ipython==2.1.0 12 | isodate==0.5.0 13 | itsdangerous==0.24 14 | pika==0.9.13 15 | pyparsing==2.0.2 16 | pytz==2014.2 17 | rdflib==4.1.2 18 | rdflib-jsonld==0.2 19 | six==1.7.3 20 | urllib3==1.8 21 | wsgiref==0.1.2 22 | -------------------------------------------------------------------------------- /semantic-adapter/templates/index.html: -------------------------------------------------------------------------------- 1 | {# index.html #} 2 |
3 | <p>For the following URLs, additional parameters are:</p>
4 | <ul>
5 |   <li>combined (no value required)</li>
6 |   <li>format (only read when combined is set, possible values are n3, xml, json-ld)</li>
7 | </ul>
8 | <ul>
9 | {% for url, endpoint in links %}
10 |   <li><a href="{{ url }}">{{ endpoint }}</a></li>
11 | {% endfor %}
12 | </ul>
13 |
14 |
15 | <p>Here is the entrance to CityPulse.</p>
16 | <ul>
17 |   <li>An example of CityPulse-style annotation; the last section of the URL is the stream ID:
18 |     (n3)
19 |     (xml)
20 |     (json-ld)
21 |   </li>
22 | </ul>
23 |
24 |
--------------------------------------------------------------------------------
/semantic-adapter/test_pubsub/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = 'ehonlia'
2 |
--------------------------------------------------------------------------------
/semantic-adapter/test_pubsub/kb_uploader.py:
--------------------------------------------------------------------------------
1 | from lib import broker
2 |
3 | __author__ = 'ehonlia'
4 |
5 | import pika
6 | import base64
7 | import urllib
8 | import urllib2
9 | import json
10 |
11 | from lib.constants import METADATA
12 |
13 |
14 | def __upload(body, type):
15 | message = json.loads(body)
16 |
17 | data = urllib.urlencode({
18 | 'data': message[METADATA],
19 | 'baseURI': 'http://iot.iot/streams/'
20 | })
21 | url = 'http://192.121.150.101:3020/servlets/uploadData'
22 | username = 'admin'
23 | password = 's3cret'
24 |
25 | base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '')
26 | req = urllib2.Request(url, data)
27 | req.add_header("Authorization", "Basic %s" % base64string)
28 | urllib2.urlopen(req)
29 |
30 |
31 | def __callback_stream(ch, method, properties, body):
32 | __upload(body, 'streams')
33 |
34 |
35 | def __callback_virtual_stream(ch, method, properties, body):
36 | __upload(body, 'virtual_streams')
37 |
38 |
39 | def subscribe_to_stream_update():
40 | connect = pika.BlockingConnection(pika.ConnectionParameters(host=broker.HOST))
41 | channel = connect.channel()
42 |
43 | channel.exchange_declare(exchange=broker.SEMANTIC_STREAM_EXCHANGE, type=broker.EXCHANGE_TYPE)
44 |
45 | result = channel.queue_declare(exclusive=True)
46 | queue_name = result.method.queue
47 |
48 | channel.queue_bind(exchange=broker.SEMANTIC_STREAM_EXCHANGE, queue=queue_name,
49 | routing_key=broker.SEMANTIC_STREAM_ROUTING_KEY)
50 |
51 | channel.basic_consume(__callback_stream, queue=queue_name, no_ack=True)
52 | channel.start_consuming()
53 |
54 |
55 | def subscribe_to_virtual_stream_update():
56 | connect = pika.BlockingConnection(pika.ConnectionParameters(host=broker.HOST))
57 | channel = connect.channel()
58 |
59 | channel.exchange_declare(exchange=broker.SEMANTIC_VIRTUAL_STREAM_EXCHANGE, type=broker.EXCHANGE_TYPE)
60 |
61 | result = channel.queue_declare(exclusive=True)
62 | queue_name = result.method.queue
63 |
64 | channel.queue_bind(exchange=broker.SEMANTIC_VIRTUAL_STREAM_EXCHANGE, queue=queue_name,
65 | routing_key=broker.SEMANTIC_VIRTUAL_STREAM_ROUTING_KEY)
66 |
67 | channel.basic_consume(__callback_virtual_stream, queue=queue_name, no_ack=True)
68 | channel.start_consuming()
69 |
70 |
71 | if __name__ == '__main__':
72 | subscribe_to_stream_update()
73 |
--------------------------------------------------------------------------------
/semantic-adapter/test_pubsub/test_publisher.py:
--------------------------------------------------------------------------------
1 | from lib import broker
2 |
3 | __author__ = 'ehonlia'
4 |
5 | import pika
6 | import logging
7 |
8 | logging.basicConfig()
9 |
10 | connection = pika.BlockingConnection(pika.ConnectionParameters(host=broker.HOST))
11 | channel = connection.channel()
12 |
13 | channel.exchange_declare(exchange=broker.STREAM_EXCHANGE, type=broker.EXCHANGE_TYPE)
14 |
15 | message = '''
16 | {
17 | "_index": "sensorcloud",
18 | "_source": {
19 | "polling": false,
20 | "min_val": "0",
21 | "nr_subscribers": 0,
22 | "uri": "",
23 | "name": "[ER Day 2013] Battery North",
24 | "resource": {
25 | "resource_type": "",
26
| "uuid": "" 27 | }, 28 | "active": true, 29 | "subscribers": [], 30 | "user_ranking": { 31 | "average": 60, 32 | "nr_rankings": 1 33 | }, 34 | "unit": "", 35 | "quality": 1, 36 | "history_size": 6995, 37 | "polling_freq": 0, 38 | "creation_date": "2014-01-09", 39 | "private": false, 40 | "parser": "", 41 | "last_updated": "2014-01-21T16:26:50.000", 42 | "user_id": "user", 43 | "location": { 44 | "lon": 17.949467700000014, 45 | "lat": 59.40325599999999 46 | }, 47 | "type": "battery level", 48 | "accuracy": "", 49 | "description": "battery level of the mote on the North pipe (not leaky)", 50 | "data_type": "application/json", 51 | "tags": "battery charge", 52 | "max_val": "255" 53 | }, 54 | "_id": "abcdef", 55 | "_type": "stream", 56 | "_score": 1 57 | } 58 | ''' 59 | channel.basic_publish(exchange=broker.STREAM_EXCHANGE, 60 | routing_key=broker.STREAM_ROUTING_KEY, 61 | body=message) 62 | print " [x] Sent %r:%r" % (broker.STREAM_ROUTING_KEY, message) 63 | connection.close() 64 | -------------------------------------------------------------------------------- /semantic-adapter/web_run.py: -------------------------------------------------------------------------------- 1 | __author__ = 'ehonlia' 2 | 3 | if __name__ == '__main__': 4 | from app import app 5 | 6 | app.run(host='0.0.0.0', debug=True) 7 | -------------------------------------------------------------------------------- /src/api_help.erl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/api_help.erl -------------------------------------------------------------------------------- /src/destructure_json.erl: -------------------------------------------------------------------------------- 1 | %% Copyright (c) 2010,2011 Alessandro Sivieri 2 | %% 3 | %% This file is part of CREST-Erlang. 4 | %% 5 | %% CREST-Erlang is free software: you can redistribute it and/or modify 6 | %% it under the terms of the GNU Lesser General Public License as published by 7 | %% the Free Software Foundation, either version 3 of the License, or 8 | %% (at your option) any later version. 9 | %% 10 | %% CREST-Erlang is distributed in the hope that it will be useful, 11 | %% but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | %% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | %% GNU Lesser General Public License for more details. 14 | %% 15 | %% You should have received a copy of the GNU Lesser General Public License 16 | %% along with CREST-Erlang. If not, see . 17 | %% 18 | -module(destructure_json). 19 | -export([parse/1,file/1]). 20 | -compile(nowarn_unused_vars). 21 | -compile({nowarn_unused_function,[p/4, p/5, p_eof/0, p_optional/1, p_not/1, p_assert/1, p_seq/1, p_and/1, p_choose/1, p_zero_or_more/1, p_one_or_more/1, p_label/2, p_string/1, p_anything/0, p_charclass/1, line/1, column/1]}). 22 | 23 | 24 | 25 | file(Filename) -> {ok, Bin} = file:read_file(Filename), parse(binary_to_list(Bin)). 26 | 27 | parse(Input) -> 28 | setup_memo(), 29 | Result = case 'object'(Input,{{line,1},{column,1}}) of 30 | {AST, [], _Index} -> AST; 31 | Any -> Any 32 | end, 33 | release_memo(), Result. 34 | 35 | 'object'(Input, Index) -> 36 | p(Input, Index, 'object', fun(I,D) -> (p_seq([fun 'var'/2, fun 'path'/2]))(I,D) end, fun(Node, Idx) -> 37 | [_Var, PathFun] = Node, 38 | fun(JSON) -> 39 | PathFun(JSON) 40 | end 41 | end). 
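%% Usage sketch (not from the original source): parse/1 returns a fun that,
%% applied to a mochijson-style {struct, Props} term, follows the described
%% path; the leading variable name is only consumed, not looked up:
%%
%%   F = destructure_json:parse("json.hits[0]"),
%%   Value = F({struct, [{<<"hits">>, [<<"first">>, <<"second">>]}]}).
%%   %% Value =:= <<"first">>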
42 | 43 | 'path'(Input, Index) -> 44 | p(Input, Index, 'path', fun(I,D) -> (p_choose([p_seq([p_string("."), fun 'var'/2, p_optional(fun 'path'/2)]), p_seq([p_string("["), fun 'int'/2, p_string("]"), p_optional(fun 'path'/2)])]))(I,D) end, fun(Node, Idx) -> 45 | case Node of 46 | [".", Key, []] -> 47 | fun({struct, Obj}) -> 48 | proplists:get_value(Key, Obj) 49 | end; 50 | [".", Key, PathFun] -> 51 | fun({struct, Obj}) -> 52 | V = proplists:get_value(Key, Obj), 53 | PathFun(V) 54 | end; 55 | ["[", I, "]", []] -> 56 | fun(Array) -> 57 | lists:nth(I + 1, Array) 58 | end; 59 | ["[", I, "]", PathFun] -> 60 | fun(Array) -> 61 | V = lists:nth(I + 1, Array), 62 | PathFun(V) 63 | end 64 | end 65 | end). 66 | 67 | 'int'(Input, Index) -> 68 | p(Input, Index, 'int', fun(I,D) -> (p_one_or_more(p_charclass("[0-9]")))(I,D) end, fun(Node, Idx) -> list_to_integer(Node) end). 69 | 70 | 'var'(Input, Index) -> 71 | p(Input, Index, 'var', fun(I,D) -> (p_seq([p_charclass("[_a-zA-Z]"), p_zero_or_more(p_charclass("[_a-zA-Z0-9]"))]))(I,D) end, fun(Node, Idx) -> list_to_binary(Node) end). 72 | 73 | 74 | 75 | 76 | 77 | 78 | p(Inp, Index, Name, ParseFun) -> 79 | p(Inp, Index, Name, ParseFun, fun(N, _Idx) -> N end). 80 | 81 | p(Inp, StartIndex, Name, ParseFun, TransformFun) -> 82 | % Grab the memo table from ets 83 | Memo = get_memo(StartIndex), 84 | % See if the current reduction is memoized 85 | case dict:find(Name, Memo) of 86 | % If it is, return the result 87 | {ok, Result} -> Result; 88 | % If not, attempt to parse 89 | _ -> 90 | case ParseFun(Inp, StartIndex) of 91 | % If it fails, memoize the failure 92 | {fail,_} = Failure -> 93 | memoize(StartIndex, dict:store(Name, Failure, Memo)), 94 | Failure; 95 | % If it passes, transform and memoize the result. 96 | {Result, InpRem, NewIndex} -> 97 | Transformed = TransformFun(Result, StartIndex), 98 | memoize(StartIndex, dict:store(Name, {Transformed, InpRem, NewIndex}, Memo)), 99 | {Transformed, InpRem, NewIndex} 100 | end 101 | end. 102 | 103 | setup_memo() -> 104 | put(parse_memo_table, ets:new(?MODULE, [set])). 105 | 106 | release_memo() -> 107 | ets:delete(memo_table_name()). 108 | 109 | memoize(Position, Struct) -> 110 | ets:insert(memo_table_name(), {Position, Struct}). 111 | 112 | get_memo(Position) -> 113 | case ets:lookup(memo_table_name(), Position) of 114 | [] -> dict:new(); 115 | [{Position, Dict}] -> Dict 116 | end. 117 | 118 | memo_table_name() -> 119 | get(parse_memo_table). 120 | 121 | p_eof() -> 122 | fun([], Index) -> {eof, [], Index}; 123 | (_, Index) -> {fail, {expected, eof, Index}} end. 124 | 125 | p_optional(P) -> 126 | fun(Input, Index) -> 127 | case P(Input, Index) of 128 | {fail,_} -> {[], Input, Index}; 129 | {_, _, _} = Success -> Success 130 | end 131 | end. 132 | 133 | p_not(P) -> 134 | fun(Input, Index)-> 135 | case P(Input,Index) of 136 | {fail,_} -> 137 | {[], Input, Index}; 138 | {Result, _, _} -> {fail, {expected, {no_match, Result},Index}} 139 | end 140 | end. 141 | 142 | p_assert(P) -> 143 | fun(Input,Index) -> 144 | case P(Input,Index) of 145 | {fail,_} = Failure-> Failure; 146 | _ -> {[], Input, Index} 147 | end 148 | end. 149 | 150 | p_and(P) -> 151 | p_seq(P). 152 | 153 | p_seq(P) -> 154 | fun(Input, Index) -> 155 | p_all(P, Input, Index, []) 156 | end. 
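%% The combinators above implement a memoizing ("packrat") PEG parser: p/5
%% caches every (position, rule) result in the ets table created by
%% setup_memo/0, so each rule is evaluated at most once per input position
%% and parsing stays linear in the input size. The file appears to be
%% generated from a Neotoma-style grammar, hence the generic helpers below.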
157 | 158 | p_all([], Inp, Index, Accum ) -> {lists:reverse( Accum ), Inp, Index}; 159 | p_all([P|Parsers], Inp, Index, Accum) -> 160 | case P(Inp, Index) of 161 | {fail, _} = Failure -> Failure; 162 | {Result, InpRem, NewIndex} -> p_all(Parsers, InpRem, NewIndex, [Result|Accum]) 163 | end. 164 | 165 | p_choose(Parsers) -> 166 | fun(Input, Index) -> 167 | p_attempt(Parsers, Input, Index, none) 168 | end. 169 | 170 | p_attempt([], _Input, _Index, Failure) -> Failure; 171 | p_attempt([P|Parsers], Input, Index, FirstFailure)-> 172 | case P(Input, Index) of 173 | {fail, _} = Failure -> 174 | case FirstFailure of 175 | none -> p_attempt(Parsers, Input, Index, Failure); 176 | _ -> p_attempt(Parsers, Input, Index, FirstFailure) 177 | end; 178 | Result -> Result 179 | end. 180 | 181 | p_zero_or_more(P) -> 182 | fun(Input, Index) -> 183 | p_scan(P, Input, Index, []) 184 | end. 185 | 186 | p_one_or_more(P) -> 187 | fun(Input, Index)-> 188 | Result = p_scan(P, Input, Index, []), 189 | case Result of 190 | {[_|_], _, _} -> 191 | Result; 192 | _ -> 193 | {fail, {expected, Failure, _}} = P(Input,Index), 194 | {fail, {expected, {at_least_one, Failure}, Index}} 195 | end 196 | end. 197 | 198 | p_label(Tag, P) -> 199 | fun(Input, Index) -> 200 | case P(Input, Index) of 201 | {fail,_} = Failure -> 202 | Failure; 203 | {Result, InpRem, NewIndex} -> 204 | {{Tag, Result}, InpRem, NewIndex} 205 | end 206 | end. 207 | 208 | p_scan(_, [], Index, Accum) -> {lists:reverse( Accum ), [], Index}; 209 | p_scan(P, Inp, Index, Accum) -> 210 | case P(Inp, Index) of 211 | {fail,_} -> {lists:reverse(Accum), Inp, Index}; 212 | {Result, InpRem, NewIndex} -> p_scan(P, InpRem, NewIndex, [Result | Accum]) 213 | end. 214 | 215 | p_string(S) -> 216 | fun(Input, Index) -> 217 | case lists:prefix(S, Input) of 218 | true -> {S, lists:sublist(Input, length(S)+1, length(Input)), p_advance_index(S,Index)}; 219 | _ -> {fail, {expected, {string, S}, Index}} 220 | end 221 | end. 222 | 223 | p_anything() -> 224 | fun([], Index) -> {fail, {expected, any_character, Index}}; 225 | ([H|T], Index) -> {H, T, p_advance_index(H, Index)} 226 | end. 227 | 228 | p_charclass(Class) -> 229 | fun(Inp, Index) -> 230 | {ok, RE} = re:compile("^"++Class), 231 | case re:run(Inp, RE) of 232 | {match, _} -> 233 | {hd(Inp), tl(Inp), p_advance_index(hd(Inp), Index)}; 234 | _ -> {fail,{expected, {character_class, Class}, Index}} 235 | end 236 | end. 237 | 238 | line({{line,L},_}) -> L; 239 | line(_) -> undefined. 240 | 241 | column({_,{column,C}}) -> C; 242 | column(_) -> undefined. 243 | 244 | p_advance_index(MatchedInput, Index) when is_list(MatchedInput) -> % strings 245 | lists:foldl(fun p_advance_index/2, Index, MatchedInput); 246 | p_advance_index(MatchedInput, Index) when is_integer(MatchedInput) -> % single characters 247 | {{line, Line}, {column, Col}} = Index, 248 | case MatchedInput of 249 | $\n -> {{line, Line+1}, {column, 1}}; 250 | _ -> {{line, Line}, {column, Col+1}} 251 | end. 252 | -------------------------------------------------------------------------------- /src/engine.app.src: -------------------------------------------------------------------------------- 1 | %%-*- mode: erlang -*- 2 | {application, engine, 3 | [ 4 | {description, "engine"}, 5 | {vsn, "1"}, 6 | {modules, []}, 7 | {registered, []}, 8 | {applications, [ 9 | kernel, 10 | stdlib, 11 | inets, 12 | ibrowse, 13 | crypto, 14 | mochiweb, 15 | webmachine 16 | ]}, 17 | {mod, { engine_app, []}}, 18 | {env, []} 19 | ]}. 
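%% The term above is a standard OTP application resource file: calling
%% application:start(engine) makes OTP invoke engine_app:start/2 (named by
%% the {mod, ...} entry) once the listed dependency applications are
%% available. See engine.erl below for the manual dependency bootstrapping.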
20 |
--------------------------------------------------------------------------------
/src/engine.erl:
--------------------------------------------------------------------------------
1 | %% @author Georgios Koutsoumpakis, Li Hao
2 | %% [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 |
6 | %% @doc engine startup code
7 |
8 | -module(engine).
9 | -author('author ').
10 | -export([start/0, start_link/0, stop/0]).
11 |
12 | ensure_started(App) ->
13 | case application:start(App) of
14 | ok -> ok;
15 | {error, {already_started, App}} -> ok
16 | end.
17 |
18 | %% @spec start_link() -> {ok,Pid::pid()}
19 | %% @doc Starts the app for inclusion in a supervisor tree
20 | start_link() ->
21 | ensure_started(crypto),
22 | ensure_started(asn1),
23 | ensure_started(inets),
24 | ensure_started(ibrowse),
25 | ensure_started(public_key),
26 | ensure_started(ssl),
27 | ensure_started(xmerl),
28 | ensure_started(compiler),
29 | ensure_started(syntax_tools),
30 | ensure_started(mochiweb),
31 | application:set_env(webmachine, webmachine_logger_module, webmachine_logger),
32 | ensure_started(webmachine),
33 | engine_sup:start_link().
34 |
35 | %% @spec start() -> ok
36 | %% @doc Start the engine server.
37 | start() ->
38 | ensure_started(crypto),
39 | ensure_started(asn1),
40 | ensure_started(inets),
41 | ensure_started(ibrowse),
42 | ensure_started(public_key),
43 | ensure_started(ssl),
44 | ensure_started(xmerl),
45 | ensure_started(compiler),
46 | ensure_started(syntax_tools),
47 | ensure_started(mochiweb),
48 | application:set_env(webmachine, webmachine_logger_module, webmachine_logger),
49 | ensure_started(webmachine),
50 | application:start(engine).
51 |
52 | %% @spec stop() -> ok
53 | %% @doc Stop the engine server.
54 | stop() ->
55 | Res = application:stop(engine),
56 | application:stop(webmachine),
57 | application:stop(mochiweb),
58 | application:stop(syntax_tools),
59 | application:stop(compiler),
60 | application:stop(xmerl),
61 | application:stop(ssl),
62 | application:stop(asn1),
63 | application:stop(crypto),
64 | application:stop(public_key),
65 | application:stop(ibrowse),
66 | application:stop(inets),
67 | stop_if_running(polling_monitor),
68 | stop_if_running(polling_supervisor),
69 | stop_if_running(vstream_sup),
70 | Res.
71 |
72 | %% @spec stop_if_running(Atom::atom()) -> ok
73 | %% @doc Stop the given module if running, using the PID
74 | stop_if_running(Atom) ->
75 | case whereis(Atom) of
76 | undefined -> ok;
77 | Pid -> exit(Pid, "stop")
78 | end.
79 |
--------------------------------------------------------------------------------
/src/engine_app.erl:
--------------------------------------------------------------------------------
1 | %% @author Georgios Koutsoumpakis
2 | %% [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 |
6 | %% @doc Callbacks for the engine application.
7 |
8 | -module(engine_app).
9 | -author('author ').
10 |
11 | -behaviour(application).
12 | -export([start/2,stop/1]).
13 |
14 | -include_lib("amqp_client.hrl").
15 |
16 |
17 | % %% @spec start(_Type, _StartArgs) -> ServerRet
18 | % %% @doc application start callback for engine.
19 | % start(_Type, _StartArgs) ->
20 | % erlastic_search_app:start(),
21 | % engine_sup:start_link().
22 |
23 | % %% @spec stop(_State) -> ServerRet
24 | % %% @doc application stop callback for engine.
25 | % stop(_State) ->
26 | % ok.
27 |
28 | %% @spec start(_Type, _StartArgs) -> ServerRet
29 | %% @doc application start callback for engine.
30 | start(_Type, _StartArgs) -> 31 | erlastic_search_app:start(), 32 | singleton:start(), 33 | {ok,Connection} = amqp_connection:start(#amqp_params_network{host = "localhost"}), 34 | singleton:set(Connection), 35 | engine_sup:start_link(). 36 | 37 | %% @spec stop(_State) -> ServerRet 38 | %% @doc application stop callback for engine. 39 | stop(_State) -> 40 | {ok, Connection } = singleton:get(), 41 | ok = amqp_connection:close(Connection), 42 | ok. 43 | -------------------------------------------------------------------------------- /src/engine_sup.erl: -------------------------------------------------------------------------------- 1 | %% @author Georgios Koutsoumpakis 2 | %% [www.csproj13.student.it.uu.se] 3 | %% @version 1.0 4 | %% @copyright [Copyright information] 5 | 6 | %% @doc Supervisor for the engine application. 7 | 8 | -module(engine_sup). 9 | -author('author '). 10 | 11 | -behaviour(supervisor). 12 | 13 | %% External exports 14 | -export([start_link/0, upgrade/0]). 15 | 16 | %% supervisor callbacks 17 | -export([init/1]). 18 | 19 | %% @spec start_link() -> ServerRet 20 | %% @doc API for starting the supervisor. 21 | start_link() -> 22 | supervisor:start_link({local, ?MODULE}, ?MODULE, []). 23 | 24 | %% @spec upgrade() -> ok 25 | %% @doc Add processes if necessary. 26 | upgrade() -> 27 | {ok, {_, Specs}} = init([]), 28 | 29 | Old = sets:from_list( 30 | [Name || {Name, _, _, _} <- supervisor:which_children(?MODULE)]), 31 | New = sets:from_list([Name || {Name, _, _, _, _, _} <- Specs]), 32 | Kill = sets:subtract(Old, New), 33 | 34 | sets:fold(fun (Id, ok) -> 35 | supervisor:terminate_child(?MODULE, Id), 36 | supervisor:delete_child(?MODULE, Id), 37 | ok 38 | end, ok, Kill), 39 | 40 | [supervisor:start_child(?MODULE, Spec) || Spec <- Specs], 41 | ok. 42 | 43 | %% @spec init([]) -> SupervisorTree 44 | %% @doc supervisor callback. 45 | init([]) -> 46 | Ip = case os:getenv("WEBMACHINE_IP") of false -> "0.0.0.0"; Any -> Any end, 47 | {ok, App} = application:get_application(?MODULE), 48 | {ok, Dispatch} = file:consult(filename:join([priv_dir(App), 49 | "dispatch.conf"])), 50 | Port = case application:get_env(engine, webmachine_port) of 51 | undefined -> 8000; %Default port for webmachine 52 | {ok, AnyPort} -> AnyPort 53 | end, 54 | LogDir = case application:get_env(engine, webmachine_log_dir) of 55 | undefined -> "priv/log"; 56 | {ok, AnyDir} -> AnyDir 57 | end, 58 | 59 | WebConfig = [ 60 | {ip, Ip}, 61 | {port, Port}, 62 | {log_dir, LogDir}, 63 | {dispatch, Dispatch}], 64 | Web = {webmachine_mochiweb, 65 | {webmachine_mochiweb, start, [WebConfig]}, 66 | permanent, 5000, worker, [mochiweb_socket_server]}, 67 | polling_system:start_link(), 68 | virtual_stream_process_supervisor:start_link(), 69 | virtual_stream_process_supervisor:start_processes(), 70 | triggers:start_all_triggers_in_es(), 71 | Processes = [Web], 72 | {ok, { {one_for_one, 10, 10}, Processes} }. 73 | 74 | %% 75 | %% @doc return the priv dir 76 | priv_dir(Mod) -> 77 | case code:priv_dir(Mod) of 78 | {error, bad_name} -> 79 | Ebin = filename:dirname(code:which(Mod)), 80 | filename:join(filename:dirname(Ebin), "priv"); 81 | PrivDir -> 82 | PrivDir 83 | end. 84 | -------------------------------------------------------------------------------- /src/groups.erl: -------------------------------------------------------------------------------- 1 | 2 | -module(groups). 3 | -export([init/1, 4 | allowed_methods/2, 5 | exists/3, 6 | content_types_accepted/2, 7 | put_handler/2, 8 | put_group/2]). 
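%% The exports above are the webmachine resource callbacks for the group
%% endpoints; webmachine calls them along its HTTP decision flow, roughly
%% init/1 -> allowed_methods/2 -> content_types_accepted/2 -> put_handler/2.
%% The URL-to-module mapping presumably lives in priv/dispatch.conf (not
%% inlined in this dump), e.g. a rule binding ["users", user, "groups",
%% group] to this module — that rule is an assumption for illustration.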
9 |
10 | -include_lib("webmachine.hrl").
11 | -include_lib("erlastic_search.hrl").
12 |
13 |
14 | %% Index and Type definitions
15 |
16 | -define(INDEX, "cloud").
17 | -define(GROUP, "group").
18 | -define(USER, "user").
19 | -define(STREAM, "stream").
20 |
21 | %% @doc
22 | %% Function: init/1
23 | %% Purpose: init function used to fetch path information from webmachine dispatcher.
24 | %% Returns: {ok, undefined}
25 | %% @end
26 | -spec init([]) -> {ok, undefined}.
27 | init([]) ->
28 | %% start this in the make file somehow
29 | {ok, undefined}.
30 |
31 | %% @doc
32 | %% Function: allowed_methods/2
33 | %% Purpose: defines which HTTP methods are allowed for each resource path.
34 | %% Returns: {AllowedMethods, ReqData, State}
35 | %% @end
36 |
37 | allowed_methods(ReqData, State) ->
38 | erlang:display(api_help:parse_path(wrq:path(ReqData))),
39 | case api_help:parse_path(wrq:path(ReqData)) of
40 | [{"users", _UserID}, {"groups", "_search"}] ->
41 | {['GET'], ReqData, State};
42 | [{"users", _UserID}, {"groups", _GroupID}] ->
43 | {['POST','GET','PUT','DELETE'], ReqData, State};
44 | [{"users", _UserID}, {"groups"}] ->
45 | {['GET'], ReqData, State};
46 | [{"groups", _GroupID}] ->
47 | {['GET','PUT'], ReqData, State};
48 | [{"groups"}] ->
49 | {['GET'], ReqData, State};
50 | [error] ->
51 | {['POST', 'GET'], ReqData, State} % Probably should give some error message
52 | end.
53 |
54 | %% @doc
55 | %% Function: exists/3
56 | %% Purpose: Given an index, type and Id corresponding to elasticsearch,
57 | %% this function checks the existence of a document.
58 | %% Returns: true | false
59 | %% @end
60 |
61 | exists(Index, Type, Id) ->
62 | case erlastic_search:get_doc(Index, Type, Id) of
63 | {ok, _Data} -> true;
64 | {error, _Error} -> false
65 | end.
66 |
67 | %% @doc
68 | %% Function: content_types_accepted/2
69 | %% Purpose: based on the content-type of a 'POST' or 'PUT', we know which kind of data is
70 | %% allowed to be sent to the server.
71 | %% A code 406 is returned to the client if we don't accept a media type that the client has sent.
72 | %% Returns: {[{Mediatype, Handler}], ReqData, State}
73 | %% @end
74 | content_types_accepted(ReqData, State) ->
75 | {[{"application/json", put_handler}], ReqData, State}.
76 |
77 | %% @doc
78 | %% Function: put_handler/2
79 | %% Purpose: Checks to see if the user id and/or group id exists in elasticsearch
80 | %% before updating the group document.
81 | %% Returns : {true, ReqData, State} | {{error, Reason}, ReqData, State} | {error, ReqData, State}
82 |
83 |
84 | put_handler(ReqData, State) ->
85 | case api_help:parse_path(wrq:path(ReqData)) of
86 | [{"users", UserID}, {"groups", "_search"}] ->
87 | case exists(?INDEX, ?USER, UserID) of
88 | true -> put_group(ReqData, State);
89 | false -> {{error, "User ID does not exist!"}, ReqData, State}
90 | end;
91 | [{"users", UserID}, {"groups", GroupID}] ->
92 | User_cond = exists(?INDEX, ?USER, UserID),
93 | Group_cond = exists(?INDEX, ?GROUP, GroupID),
94 | erlang:display("User : " ++ atom_to_list(User_cond)),
95 | erlang:display("Group : " ++ atom_to_list(Group_cond)),
96 | case (User_cond =:= true) and (Group_cond =:= true) of
97 | true -> put_group(ReqData, State),
98 | {true, ReqData, State};
99 | false -> {{error, "User / Group ID does not exist!"}, ReqData, State}
100 | end;
101 | [{"users", UserID}, {"groups"}] ->
102 | case exists(?INDEX, ?USER, UserID) of
103 | true -> put_group(ReqData, State),
104 | {true, ReqData, State};
105 | false -> {{error, "User ID does not exist!"}, ReqData, State}
106 | end;
107 | [{"groups", GroupID}] ->
108 | case exists(?INDEX, ?GROUP, GroupID) of
109 | true -> put_group(ReqData, State),
110 | {true, ReqData, State};
111 | false -> {{error, "Group ID does not exist!"}, ReqData, State}
112 | end;
113 | [error] -> {error, ReqData, State}
114 | end.
115 |
116 | %% @doc
117 | %% Function: put_group/2
118 | %% Purpose: updates / replaces field values in a group document
119 | %% with the data contained in the request body.
120 | %% Side-Effect : The new data will replace the existing data.
121 |
122 | put_group(ReqData, State) ->
123 | GroupId = proplists:get_value(group, wrq:path_info(ReqData)),
124 | {ReqBody,_,_} = api_help:json_handler(ReqData,State),
125 | Update = lib_json:set_attr(doc, ReqBody),
126 | case api_help:update_doc(?INDEX, ?GROUP, GroupId, Update) of
127 | {error,Reason} -> {{error,Reason}, ReqData, State};
128 | {ok,List} -> {List,ReqData,State}
129 | end.
130 |
--------------------------------------------------------------------------------
/src/lib_file.erl:
--------------------------------------------------------------------------------
1 | %% @author Tommy Mattsson [www.csproj13.student.it.uu.se]
2 | %% @copyright [Copyright information]
3 | %% @version 1.0
4 | %% @doc == Library for manipulating files and directories ==
5 | %% @end
6 | -module(lib_file).
7 | -include("debug.hrl").
8 | -export([ensure_dir_exists/1, read_file_lines/1, write_file_lines/2]).
9 |
10 | %% @doc
11 | %% Ensures a directory exists.
12 | %% @end
13 | -spec ensure_dir_exists(Dir::string()) -> ok | error.
14 | ensure_dir_exists(AbsoluteDir = "/"++_Dir) ->
15 | User = os:cmd("echo $USER")--"\n",
16 | case User of
17 | "root" ->
18 | os:cmd("mkdir -p "++AbsoluteDir),
19 | ok;
20 | User ->
21 | case re:run(AbsoluteDir, "^/home/"++User, [{capture, none}]) of
22 | nomatch ->
23 | case file:list_dir(AbsoluteDir) of
24 | {ok, _FileList} ->
25 | ?DEBUG("Directory '" ++ AbsoluteDir ++ "' exists."),
26 | ok;
27 | {error, _Reason} ->
28 | ?DEBUG("Cannot create directory '"++AbsoluteDir++"' without sudo rights. Rerun with sudo access"),
29 | error
30 | end;
31 | match ->
32 | os:cmd("mkdir -p " ++ AbsoluteDir),
33 | ok
34 | end
35 | end;
36 | ensure_dir_exists(RelativeDir) ->
37 | os:cmd("mkdir -p " ++ RelativeDir),
38 | ok.
39 |
40 |
41 |
42 |
43 | %% @doc
44 | %% Read all lines from a file.
45 | %% @end
46 | -spec read_file_lines(File::string()) -> [string()] | error.
47 | read_file_lines(File) ->
48 | case file:open(File, [read]) of
49 | {ok, Fd} ->
50 | Lines = read_file_lines(Fd, []),
51 | file:close(Fd),
52 | Lines;
53 | _ ->
54 | ?ERROR("Unable to open file: "++File),
55 | error
56 | end.
57 |
58 | %% @doc
59 | %% Read all lines from a file. Used by read_file_lines/1
60 | %% @end
61 | -spec read_file_lines(Fd::file:io_device(), Acc::list()) -> [string()].
62 | read_file_lines(Fd, Acc) ->
63 | case file:read_line(Fd) of
64 | eof ->
65 | lists:reverse(Acc);
66 | {ok, Line} ->
67 | read_file_lines(Fd, [Line | Acc])
68 | end.
69 |
70 | %% @doc
71 | %% Writes all lines to a file. If the file exists then all of the old content is removed and Lines will be inserted instead.
72 | %% @end
73 | -spec write_file_lines(File::string(), Lines::[string()]) -> ok | error.
74 | write_file_lines(File, Lines) ->
75 | case file:open(File, [write]) of
76 | {ok, Fd} ->
77 | file:write(Fd, Lines),
78 | file:close(Fd);
79 | _ ->
80 | ?ERROR("Unable to open file: "++File),
81 | error
82 | end.
83 |
--------------------------------------------------------------------------------
/src/parser.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/parser.erl
--------------------------------------------------------------------------------
/src/poll_help.erl:
--------------------------------------------------------------------------------
1 | %% @author Gabriel Tholsgård, Li Hao
2 | %% [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 | %%
6 | %% @doc == poll_help ==
7 | %% This module contains helper functions needed for the polling system
8 | %%
9 | %% @end
10 |
11 | -module(poll_help).
12 |
13 | -include("common.hrl").
14 | -include_lib("erlastic_search.hrl").
15 | -include("erlson.hrl").
16 | -include("json.hrl").
17 | -include("poller.hrl").
18 | -include("pubsub.hrl").
19 | -include("field_restrictions.hrl").
20 |
21 | -export([get_streams_using_polling/0,
22 | json_to_record_streams/1,
23 | json_to_record_stream/1,
24 | create_poller_history/1,
25 | add_success/1,
26 | add_failed/2]).
27 |
28 |
29 |
30 |
31 | %% ====================================================================
32 | %% API functions
33 | %% ====================================================================
34 |
35 | %% @doc
36 | %% Function: get_streams_using_polling/0
37 | %% Purpose: Retrieves all streams from Elastic Search that are using polling.
38 | %% Returns: [] | [Stream, ... ] | {error, Reason}.
39 | %% @end
40 | -spec get_streams_using_polling() -> [] | [json_string()] | {error, term()}.
41 | get_streams_using_polling() ->
42 | JsonQuery = "{\"size\":10000, \"query\": {\"term\":{\"polling\":true}}, "++
43 | "\"filter\": {\"exists\": {\"field\":\"uri\"}}}",
44 |
45 | case erlastic_search:search_json(#erls_params{},
46 | ?ES_INDEX,
47 | "stream",
48 | JsonQuery) of
49 | {error, Reason} -> {error, Reason};
50 | {ok, Result} ->
51 | lib_json:get_field(Result, "hits.hits")
52 | end.
53 |
54 |
55 |
56 |
57 | %% @doc
58 | %% Function: json_to_record_streams/1
59 | %% Purpose: Converts a list of stream Jsons to a list of pollerInfo records
60 | %% Returns: [] | [Stream, ...]
61 | %% @end
62 | -spec json_to_record_streams([json_string()]) -> [] | [record()].
63 | json_to_record_streams([]) -> [];
64 | json_to_record_streams([H|T]) ->
65 | [json_to_record_stream(H) | json_to_record_streams(T)].
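%% Typical use (a sketch combining the two functions above; handle_error/1
%% is a hypothetical caller-side helper, not part of this module):
%%
%%   case poll_help:get_streams_using_polling() of
%%       {error, Reason} -> handle_error(Reason);
%%       Hits -> Pollers = poll_help:json_to_record_streams(Hits)
%%   end.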
66 |
67 |
68 |
69 |
70 | %% @doc
71 | %% Function: json_to_record_stream/1
72 | %% Purpose: Converts a stream Json to a pollerInfo record
73 | %% Returns: #pollerInfo{}
74 | %% @end
75 | -spec json_to_record_stream(Stream::json_string()) -> record().
76 | json_to_record_stream(Stream) ->
77 | Name = case lib_json:get_field(Stream, "_source.name") of
78 | undefined -> undefined;
79 | N -> binary_to_list(N)
80 | end,
81 | Uri = case lib_json:get_field(Stream, "_source.uri") of
82 | undefined -> undefined;
83 | U -> binary_to_list(U)
84 | end,
85 | DataType = case lib_json:get_field(Stream, "_source.data_type") of
86 | undefined -> undefined;
87 | D -> binary_to_list(D)
88 | end,
89 | ParserString = case lib_json:get_field(Stream, "_source.parser") of
90 | undefined -> undefined;
91 | P -> binary_to_list(P)
92 | end,
93 | #pollerInfo{stream_id = binary_to_list(lib_json:get_field(Stream, "_id")),
94 | name = Name,
95 | uri = Uri,
96 | frequency = lib_json:get_field(Stream, "_source.polling_freq"),
97 | data_type = DataType,
98 | parser = ParserString
99 | }.
100 |
101 | %% @doc
102 | %% Function: create_poller_history/1
103 | %% Purpose: creates an empty polling history for
104 | %% the given stream
105 | %% Returns: ok or {Code,Body} if there was an error in ES
106 | %% @end
107 | -spec create_poller_history(StreamId::string()) -> ok | {integer(),string()}.
108 |
109 | create_poller_history(StreamId) ->
110 | NewHistory = lib_json:set_attrs([{"history","[]"}]),
111 | case erlastic_search:index_doc_with_id(?INDEX, "pollinghistory", StreamId, NewHistory) of
112 | {error,{Code,Body}} ->
113 | {Code,Body};
114 | {ok,_List} ->
115 | ok
116 | end.
117 | create_poller_history(StreamId, Message) -> % create_poller_history/2 seeds the history with an initial message
118 | NewHistory = lib_json:set_attrs([{"history","["++Message++"]"}]),
119 | case erlastic_search:index_doc_with_id(?INDEX, "pollinghistory", StreamId, NewHistory) of
120 | {error,{Code,Body}} ->
121 | {Code,Body};
122 | {ok,_List} ->
123 | ok
124 | end.
125 |
126 | %% @doc
127 | %% Function: add_failed/2
128 | %% Purpose: Updates the polling history with an error message
129 | %% Returns: ok or {error,{Code,Body}} if there was an error in ES
130 | %% @end
131 | -spec add_failed(StreamId::string(),Error::atom()) -> ok | {atom(),{integer(),string()}}.
132 |
133 | add_failed(StreamId,connection_error) ->
134 | Time = ?TIME_NOW(erlang:localtime()),
135 | UserId = case erlastic_search:get_doc(?INDEX, "stream", StreamId) of
136 | {error, Reason} ->
137 | error;
138 | {ok,List} ->
139 | lib_json:get_field(List, "_source.user_id")
140 | end,
141 | Message = lib_json:set_attrs([{"polling","{}"},{"polling.stream",list_to_binary(StreamId)},{"polling.action",list_to_binary("error")},{"polling.message",list_to_binary("Connection Error")},{"polling.timestamp",list_to_binary(Time)}]),
142 | UpdateJson = "{\"script\":\"ctx._source.notifications += msg\",\"params\":{\"msg\":"++ Message ++"}}",
143 | case api_help:update_doc(?INDEX, "user", UserId, UpdateJson, []) of
144 | {error, {Code, Body}} ->
145 | {error, {Code, Body}};
146 | {ok, Response} ->
147 | ok
148 | end,
149 | UpdateJson2 = "{\"script\":\"if (ctx._source.history.size() == 100){ctx._source.history.remove((Object) ctx._source.history[0]);ctx._source.history += msg}{ctx._source.history += msg} \",\"params\":{\"msg\":"++ Message ++"}}",
150 | case api_help:update_doc(?INDEX, "pollinghistory", StreamId, UpdateJson2, []) of
151 | {error, {404, Body2}} -> create_poller_history(StreamId, Message);
152 | {error, {Code2, Body2}} ->
153 | erlang:display("Error when updating pollinghistory for " ++ StreamId),
154 | {error, {Code2, Body2}};
155 | {ok, Response2} ->
156 | ok
157 | end;
158 |
159 | add_failed(StreamId,elasticsearch_error) ->
160 | Time = ?TIME_NOW(erlang:localtime()),
161 | UserId = case erlastic_search:get_doc(?INDEX, "stream", StreamId) of
162 | {error, Reason} ->
163 | error;
164 | {ok,List} ->
165 | lib_json:get_field(List, "_source.user_id")
166 | end,
167 | Message = lib_json:set_attrs([{"polling","{}"},{"polling.stream",list_to_binary(StreamId)},{"polling.message",list_to_binary("Could not save datapoint")},{"polling.action",list_to_binary("error")},{"polling.timestamp",list_to_binary(Time)}]),
168 | UpdateJson = "{\"script\":\"ctx._source.notifications += msg\",\"params\":{\"msg\":"++ Message ++"}}",
169 | case api_help:update_doc(?INDEX, "user", UserId, UpdateJson, []) of
170 | {error, {Code, Body}} ->
171 | {error, {Code, Body}};
172 | {ok, Response} ->
173 | ok
174 | end,
175 | UpdateJson2 = "{\"script\":\"if (ctx._source.history.size() == 100){ctx._source.history.remove((Object) ctx._source.history[0]);ctx._source.history += msg}{ctx._source.history += msg} \",\"params\":{\"msg\":"++ Message ++"}}",
176 | case api_help:update_doc(?INDEX, "pollinghistory", StreamId, UpdateJson2, []) of
177 | {error, {404, Body2}} -> create_poller_history(StreamId, Message);
178 | {error, {Code2, Body2}} ->
179 | erlang:display("Error when updating pollinghistory for " ++ StreamId),
180 | {error, {Code2, Body2}};
181 | {ok, Response2} ->
182 | ok
183 | end.
184 |
185 |
186 | %% @doc
187 | %% Function: add_success/1
188 | %% Purpose: Updates the polling history with a created datapoint message
189 | %% Returns: ok or {error,{Code,Body}} if there was an error in ES
190 | %% @end
191 | -spec add_success(StreamId::string()) -> ok | {atom(),{integer(),string()}}.
192 |
193 | add_success(StreamId) ->
194 | Time = ?TIME_NOW(erlang:localtime()),
195 | Message = lib_json:set_attrs([{"polling","{}"},{"polling.stream",list_to_binary(StreamId)},{"polling.message",list_to_binary("Created new datapoint")},{"polling.action",list_to_binary("create")},{"polling.timestamp",list_to_binary(Time)}]),
196 | UpdateJson = "{\"script\":\"if (ctx._source.history.size() == 100){ctx._source.history.remove((Object) ctx._source.history[0]);ctx._source.history += msg}{ctx._source.history += msg} \",\"params\":{\"msg\":"++ Message ++"}}",
197 | case api_help:update_doc(?INDEX, "pollinghistory", StreamId, UpdateJson, []) of
198 | {error, {404, Body2}} -> create_poller_history(StreamId, Message);
199 | {error, {Code, Body}} ->
200 | erlang:display("Error when updating pollinghistory for " ++ StreamId),
201 | {error, {Code, Body}};
202 | {ok, Response} ->
203 | ok
204 | end.
205 |
206 | %% ====================================================================
207 | %% Internal functions
208 | %% ====================================================================
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
--------------------------------------------------------------------------------
/src/poller.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/poller.erl
--------------------------------------------------------------------------------
/src/polling_monitor.erl:
--------------------------------------------------------------------------------
1 | %% @author Li Hao
2 |
3 | %% [www.csproj13.student.it.uu.se]
4 | %% @version 1.0
5 | %% @copyright [Copyright information]
6 | %%
7 | %% @doc == polling_monitor ==
8 | %% this module implements a supervisor of pollers; when a poller crashes for some reason, this monitor can restart it
9 | %% automatically.
10 | %% @end
11 |
12 | %% more information about the supervisor framework
13 | %% can be seen here: http://learnyousomeerlang.com/supervisors
14 | %% http://www.erlang.org/doc/man/supervisor.html
15 |
16 | -module(polling_monitor).
17 | -include("state.hrl").
18 | -behaviour(supervisor).
19 |
20 | %% ====================================================================
21 | %% API functions
22 | %% ====================================================================
23 | -export([start_link/0, init/1]).
24 |
25 | %% @doc
26 | %% Function: start_link/0
27 | %% Purpose: start function used to generate the polling_monitor process, and will call the init/1 function to initialize.
28 | %% Returns: {already_started, pid()} | {shutdown, term()} | {ok, pid()}
29 | %% @end
30 | -spec start_link() -> {ok, pid()} | {already_started, pid()} | {shutdown, term()}.
31 | start_link()->
32 | supervisor:start_link({local, ?MODULE}, ?MODULE, []).
33 |
34 | %% @doc
35 | %% Function: init/1
36 | %% Purpose: init function used to initialize the polling_monitor, will be called by supervisor:start_link()
37 | %% Returns: {ok, {configuration of the supervisor, specifications of the children}}
38 | %% @end
39 | -spec init(term()) -> {ok,{{RestartStrategy,MaxR,MaxT},[ChildSpec]}} | ignore when
40 | RestartStrategy :: term(),
41 | MaxR :: integer(),
42 | MaxT :: integer(),
43 | ChildSpec :: tuple().
44 | init(_)->
45 | {ok, {{simple_one_for_one, 5, 60},
46 | [{poller, {poller, start_link, []}, transient, 1000, worker, [poller]}]
47 | }}.
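%% With a simple_one_for_one strategy, children are only added dynamically:
%% the polling system would start one poller per stream with something like
%% the call below (a sketch; the exact argument list depends on
%% poller:start_link, defined in poller.erl, which is not inlined here):
%%
%%   supervisor:start_child(polling_monitor, [PollerInfo]).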
-------------------------------------------------------------------------------- /src/polling_system.erl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/polling_system.erl -------------------------------------------------------------------------------- /src/pubsub/gen_virtual_stream_process.erl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/pubsub/gen_virtual_stream_process.erl -------------------------------------------------------------------------------- /src/pubsub/resourceProcessMock.erl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/pubsub/resourceProcessMock.erl -------------------------------------------------------------------------------- /src/pubsub/streamProcess.erl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/pubsub/streamProcess.erl -------------------------------------------------------------------------------- /src/pubsub/virtual_stream_process_supervisor.erl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/pubsub/virtual_stream_process_supervisor.erl -------------------------------------------------------------------------------- /src/scoring.erl: -------------------------------------------------------------------------------- 1 | %% @author Tommy Mattsson, Georgios Koutsoumpakis 2 | %% [www.csproj13.student.it.uu.se] 3 | %% @version 1.0 4 | %% @headerfile "json.hrl" 5 | %% @copyright [Copyright information] 6 | %% @doc == Library for calculation scores (for suggestions) == 7 | %% @end 8 | 9 | -module(scoring). 10 | -include("json.hrl"). 11 | 12 | -export([calc/1, 13 | calc/2 14 | ]). 15 | 16 | %% ==================================================================== 17 | %% API functions 18 | %% ==================================================================== 19 | 20 | % @doc 21 | % Calculates the number of non "undefined" in the input list. It is used as a scoring 22 | % mechanism for suggestions 23 | % @end 24 | -spec calc(List::list()) -> integer(). 25 | calc(List) when is_list(List)-> 26 | Fun = fun(undefined, Acc) -> Acc; 27 | ("", Acc) -> Acc; 28 | (<<>>, Acc) -> Acc; 29 | (_, Acc) -> Acc+1 30 | end, 31 | lists:foldr(Fun, 0, List). 32 | 33 | 34 | % @doc 35 | % Calculates the score for a given resource. It is used as a scoring 36 | % mechanism for suggestions 37 | % @end 38 | -spec calc(Resource::json(), atom()) -> integer(). 
39 | calc(Resource, resource) -> 40 | Manufacturer = lib_json:get_field(Resource, "manufacturer"), 41 | Tags = lib_json:get_field(Resource, "tags"), 42 | Polling_freq = lib_json:get_field(Resource, "polling_freq"), 43 | List = [Manufacturer, Tags, Polling_freq], 44 | calc(List); 45 | calc(Stream, stream) -> 46 | Name = lib_json:get_field(Stream, "name"), 47 | Description = lib_json:get_field(Stream, "description"), 48 | Min_val = lib_json:get_field(Stream, "min_val"), 49 | Max_val = lib_json:get_field(Stream, "max_val"), 50 | Tags = lib_json:get_field(Stream, "tags"), 51 | Type = lib_json:get_field(Stream, "type"), 52 | Accuracy = lib_json:get_field(Stream, "accuracy"), 53 | calc([Name, Description, Min_val, Max_val, Tags, Type, Accuracy]). 54 | 55 | 56 | 57 | 58 | %% ==================================================================== 59 | %% Internal functions 60 | %% ==================================================================== 61 | 62 | 63 | -------------------------------------------------------------------------------- /src/singleton.erl: -------------------------------------------------------------------------------- 1 | -module(singleton). 2 | 3 | -export([get/0, set/1, start/0]). 4 | 5 | -export([loop/1]). 6 | 7 | % spec singleton:get() -> {ok, Value::any()} | not_set 8 | get() -> 9 | ?MODULE ! {get, self()}, 10 | receive 11 | {ok, not_set} -> not_set; 12 | Answer -> Answer 13 | end. 14 | 15 | % spec singleton:set(Value::any()) -> ok 16 | set(Value) -> 17 | ?MODULE ! {set, self(), Value}, 18 | receive 19 | ok -> ok 20 | end. 21 | 22 | start() -> 23 | register(?MODULE, spawn(?MODULE, loop, [not_set])). 24 | 25 | loop(Value) -> 26 | receive 27 | {get, From} -> 28 | From ! {ok, Value}, 29 | loop(Value); 30 | {set, From, NewValue} -> 31 | From ! ok, 32 | loop(NewValue) 33 | end. 34 | -------------------------------------------------------------------------------- /src/static_resource.erl: -------------------------------------------------------------------------------- 1 | %% @author Bryan Fink 2 | %% @author Andy Gross 3 | %% @author Justin Sheehy 4 | %% @copyright 2008-2009 Basho Technologies, Inc. 5 | 6 | -module(static_resource). 7 | -export([init/1]). 8 | -export([allowed_methods/2, 9 | resource_exists/2, 10 | last_modified/2, 11 | content_types_provided/2, 12 | content_types_accepted/2, 13 | delete_resource/2, 14 | post_is_create/2, 15 | create_path/2, 16 | provide_content/2, 17 | accept_content/2, 18 | generate_etag/2]). 19 | 20 | -record(context, {root,response_body=undefined,metadata=[]}). 21 | 22 | -include("webmachine.hrl"). 23 | -include_lib("kernel/include/file.hrl"). 24 | 25 | 26 | init(ConfigProps) -> 27 | {root, Root} = proplists:lookup(root, ConfigProps), 28 | {ok, #context{root=Root}}. 29 | 30 | allowed_methods(ReqData, Context) -> 31 | {['HEAD', 'GET', 'PUT', 'DELETE', 'POST'], ReqData, Context}. 32 | 33 | file_path(_Context, []) -> 34 | false; 35 | file_path(Context, Name) -> 36 | RelName = case hd(Name) of 37 | "/" -> tl(Name); 38 | _ -> Name 39 | end, 40 | filename:join([Context#context.root, RelName]). 41 | 42 | file_exists(Context, Name) -> 43 | NamePath = file_path(Context, Name), 44 | case filelib:is_regular(NamePath) of 45 | true -> 46 | {true, NamePath}; 47 | false -> 48 | false 49 | end. 
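%% maybe_fetch_object/2 further below caches the file contents in
%% #context.response_body the first time the file is read, so
%% resource_exists, provide_content and generate_etag can share a single
%% file read per request; the special "p" path in resource_exists is
%% treated as existing even without a backing file.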
50 | 51 | resource_exists(ReqData, Context) -> 52 | Path = wrq:disp_path(ReqData), 53 | case file_exists(Context, Path) of 54 | {true, _} -> 55 | {true, ReqData, Context}; 56 | _ -> 57 | case Path of 58 | "p" -> {true, ReqData, Context}; 59 | _ -> {false, ReqData, Context} 60 | end 61 | end. 62 | 63 | maybe_fetch_object(Context, Path) -> 64 | % if returns {true, NewContext} then NewContext has response_body 65 | case Context#context.response_body of 66 | undefined -> 67 | case file_exists(Context, Path) of 68 | {true, FullPath} -> 69 | {ok, Value} = file:read_file(FullPath), 70 | {true, Context#context{response_body=Value}}; 71 | false -> 72 | {false, Context} 73 | end; 74 | _Body -> 75 | {true, Context} 76 | end. 77 | 78 | content_types_provided(ReqData, Context) -> 79 | CT = webmachine_util:guess_mime(wrq:disp_path(ReqData)), 80 | {[{CT, provide_content}], ReqData, 81 | Context#context{metadata=[{'content-type', CT}|Context#context.metadata]}}. 82 | 83 | content_types_accepted(ReqData, Context) -> 84 | CT = case wrq:get_req_header("content-type", ReqData) of 85 | undefined -> "application/octet-stream"; 86 | X -> X 87 | end, 88 | {MT, _Params} = webmachine_util:media_type_to_detail(CT), 89 | {[{MT, accept_content}], ReqData, 90 | Context#context{metadata=[{'content-type', MT}|Context#context.metadata]}}. 91 | 92 | accept_content(ReqData, Context) -> 93 | Path = wrq:disp_path(ReqData), 94 | FP = file_path(Context, Path), 95 | ok = filelib:ensure_dir(FP), 96 | ReqData1 = case file_exists(Context, Path) of 97 | {true, _} -> 98 | ReqData; 99 | _ -> 100 | LOC = "http://" ++ 101 | wrq:get_req_header("host", ReqData) ++ 102 | "/fs/" ++ Path, 103 | wrq:set_resp_header("Location", LOC, ReqData) 104 | end, 105 | Value = wrq:req_body(ReqData1), 106 | case file:write_file(FP, Value) of 107 | ok -> 108 | {true, wrq:set_resp_body(Value, ReqData1), Context}; 109 | Err -> 110 | {{error, Err}, ReqData1, Context} 111 | end. 112 | 113 | post_is_create(ReqData, Context) -> 114 | {true, ReqData, Context}. 115 | 116 | create_path(ReqData, Context) -> 117 | case wrq:get_req_header("slug", ReqData) of 118 | undefined -> {undefined, ReqData, Context}; 119 | Slug -> 120 | case file_exists(Context, Slug) of 121 | {true, _} -> {undefined, ReqData, Context}; 122 | _ -> {Slug, ReqData, Context} 123 | end 124 | end. 125 | 126 | delete_resource(ReqData, Context) -> 127 | case file:delete(file_path( 128 | Context, wrq:disp_path(ReqData))) of 129 | ok -> {true, ReqData, Context}; 130 | _ -> {false, ReqData, Context} 131 | end. 132 | 133 | provide_content(ReqData, Context) -> 134 | case maybe_fetch_object(Context, wrq:disp_path(ReqData)) of 135 | {true, NewContext} -> 136 | Body = NewContext#context.response_body, 137 | {Body, ReqData, Context}; 138 | {false, NewContext} -> 139 | {error, ReqData, NewContext} 140 | end. 141 | 142 | last_modified(ReqData, Context) -> 143 | {true, FullPath} = file_exists(Context, 144 | wrq:disp_path(ReqData)), 145 | LMod = filelib:last_modified(FullPath), 146 | {LMod, ReqData, Context#context{metadata=[{'last-modified', 147 | httpd_util:rfc1123_date(LMod)}|Context#context.metadata]}}. 148 | 149 | hash_body(Body) -> mochihex:to_hex(binary_to_list(crypto:sha(Body))). 
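%% hash_body/1 above derives the ETag from the hex-encoded SHA-1 digest of the
%% complete response body. crypto:sha/1 is the legacy API; on OTP releases
%% where it has been removed, an equivalent definition (a sketch, not part of
%% the original module) would be:
%%
%%   hash_body(Body) -> mochihex:to_hex(binary_to_list(crypto:hash(sha, Body))).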
150 | 
151 | generate_etag(ReqData, Context) ->
152 |     case maybe_fetch_object(Context, wrq:disp_path(ReqData)) of
153 |         {true, BodyContext} ->
154 |             ETag = hash_body(BodyContext#context.response_body),
155 |             {ETag, ReqData,
156 |              BodyContext#context{metadata=[{etag,ETag}|
157 |                                            BodyContext#context.metadata]}};
158 |         _ ->
159 |             {undefined, ReqData, Context}
160 |     end.
161 | 
--------------------------------------------------------------------------------
/src/stream_publisher.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/stream_publisher.erl
--------------------------------------------------------------------------------
/src/stream_reciever.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/stream_reciever.erl
--------------------------------------------------------------------------------
/src/triggers.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/triggers.erl
--------------------------------------------------------------------------------
/src/triggers_lib.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/triggers_lib.erl
--------------------------------------------------------------------------------
/src/vs_func_lib.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/src/vs_func_lib.erl
--------------------------------------------------------------------------------
/test/config.spec:
--------------------------------------------------------------------------------
1 | {include, ["../include/", "../lib/erlastic_search/include/", "../lib/erlson/include/", "../lib/rabbitmq-erlang-client/include/", "../lib/webmachine/include/"]}.
2 | {suites, "../test/", virtualStreamProcess_tests_SUITE}.
3 | {logdir, "../test-results/"}.
4 | 
5 | 
--------------------------------------------------------------------------------
/test/datapoints_tests.erl:
--------------------------------------------------------------------------------
1 | %% @author Iakovos Koutsoumpakis
2 | %%   [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 | %%
6 | %% @doc == datapoints_tests ==
7 | %% This module contains several tests to test the functionality
8 | %% of the RESTful API for datapoints.
9 | %%
10 | %% @end
11 | 
12 | -module(datapoints_tests).
13 | -include_lib("eunit/include/eunit.hrl").
14 | -include_lib("stdlib/include/qlc.hrl").
15 | -include("debug.hrl").
16 | 
17 | 
18 | %% ====================================================================
19 | %% API functions
20 | %% ====================================================================
21 | -export([]).
22 | 
23 | %% ====================================================================
24 | %% Internal functions
25 | %% ====================================================================
26 | 
27 | -define(WEBMACHINE_URL, api_help:get_webmachine_url()).
28 | -define(DATAPOINTS_URL, ?WEBMACHINE_URL++"/streams/4/data/").
29 | -define(TEST_VALUE, "3").
30 | -define(TEST_TIMESTAMP, "2").
31 | -define(INDEX, "sensorcloud").
32 | 
33 | %% @doc
34 | %% Function: post_test/0
35 | %% Purpose: Test a post request
36 | %% Returns: ok | {error, term()}
37 | %%
38 | %% @end
39 | -spec post_test() -> ok | {error, term()}.
40 | post_test() ->
41 |     erlastic_search:index_doc_with_id(?INDEX,"stream","4","{\"tags\" : \"data_points\"}"),
42 |     api_help:refresh(),
43 |     Response1 = post_request(?DATAPOINTS_URL, "application/json",
44 |                              "{\"value\":\"" ++ ?TEST_VALUE ++ "\", \"timestamp\": \"" ++ ?TEST_TIMESTAMP ++ "\"}"),
45 |     check_returned_code(Response1, 200),
46 |     api_help:refresh(),
47 |     erlastic_search:delete_doc(?INDEX,"stream","4"),
48 |     ?assertNotMatch({error, "no match"}, get_index_id(?TEST_VALUE, ?TEST_TIMESTAMP)).
49 | 
50 | 
51 | %% @doc
52 | %% Function: get_existing_datapoint_test/0
53 | %% Purpose: Test a get request for a datapoint that exists, using its Id
54 | %% Returns: ok | {error, term()}
55 | %%
56 | %% @end
57 | -spec get_existing_datapoint_test() -> ok | {error, term()}.
58 | get_existing_datapoint_test() ->
59 |     Id = get_index_id(?TEST_VALUE, ?TEST_TIMESTAMP),
60 |     ?assertNotMatch({error, "no match"}, Id),
61 |     Response1 = get_request(?DATAPOINTS_URL ++ "_search?_id=" ++ Id),
62 |     check_returned_code(Response1, 200).
63 | 
64 | 
65 | %% @doc
66 | %% Function: no_timestamp_test/0
67 | %% Purpose: Test a post request without a timestamp
68 | %% Returns: ok | {error, term()}
69 | %%
70 | %% @end
71 | -spec no_timestamp_test() -> ok | {error, term()}.
72 | no_timestamp_test() ->
73 |     erlastic_search:index_doc_with_id(?INDEX,"stream","5","{\"tags\" : \"data_points\"}"),
74 |     api_help:refresh(),
75 |     Response1 = post_request(?WEBMACHINE_URL++"/streams/5/data/", "application/json",
76 |                              "{\"value\":\"55\"}"),
77 |     check_returned_code(Response1, 200),
78 |     api_help:refresh(),
79 |     {ok,{_,_,Body}} = httpc:request(get, {?WEBMACHINE_URL++"/streams/5/data/", []}, [], []),
80 |     ObjectList = lib_json:get_field(Body,"data"),
81 |     ?assertEqual(true, lib_json:get_field(lists:nth(1,ObjectList),"timestamp") =/= undefined).
82 | 
83 | %% @doc
84 | %% Function: update_stream_fields_test/0
85 | %% Purpose: Test adding a datapoint and check that the
86 | %%          stream is updated
87 | %% Returns: ok | {error, term()}
88 | %%
89 | %% @end
90 | -spec update_stream_fields_test() -> ok | {error, term()}.
91 | update_stream_fields_test() ->
92 |     {ok, {{_Version, 200, _ReasonPhrase}, _Headers, Body}} = httpc:request(post, {?WEBMACHINE_URL++ "/users", [],"application/json", "{\"username\" : \"update_stream_user\"}"}, [], []),
93 |     UserId = lib_json:get_field(Body,"_id"),
94 |     api_help:refresh(),
95 |     {ok, {{_Version1, 200, _ReasonPhrase1}, _Headers1, Body1}} = httpc:request(post, {?WEBMACHINE_URL++"/streams", [],"application/json", "{\"name\" : \"search\", \"min_val\": \"0.0\" , \"max_val\": \"1.0\", \"user_id\" : \"update_stream_user\", \"private\" : \"false\"}"}, [], []),
96 |     StreamId = lib_json:get_field(Body1,"_id"),
97 |     api_help:refresh(),
98 |     {ok, {{_Version2, 200, _ReasonPhrase2}, _Headers2, Body2}} = httpc:request(post, {?WEBMACHINE_URL++"/streams/" ++ lib_json:to_string(StreamId) ++ "/data", [],"application/json", "{\"value\":5.0}"}, [], []),
99 |     api_help:refresh(),
100 |     {ok, {{_Version3, 200, _ReasonPhrase3}, _Headers3, Body3}} = httpc:request(get, {?WEBMACHINE_URL++"/streams/" ++ lib_json:to_string(StreamId), []}, [], []),
101 |     {ok, {{_Version4, 200, _ReasonPhrase4}, _Headers4, Body4}} = httpc:request(get, {?WEBMACHINE_URL++"/streams/" ++ lib_json:to_string(StreamId) ++ "/data", []}, [], []),
102 |     {ok, {{_Version5, 200, _ReasonPhrase5}, _Headers5, _Body5}} = httpc:request(delete, {?WEBMACHINE_URL++"/users/update_stream_user", []}, [], []),
103 |     ?assertEqual(lib_json:get_field(Body3,"last_updated"), lib_json:get_field(Body4,"data[0].timestamp")).
104 | 
105 | %% @doc
106 | %% Function: get_index_id/2
107 | %% Purpose: Searches ES and returns the _id of a datapoint
108 | %% Returns: string() | {error, string()}
109 | %%
110 | %% @end
111 | -spec get_index_id(string(), string()) -> string() | {error, string()}.
112 | get_index_id(Uvalue, Uvalue2) ->
113 |     Response1 = get_request(?DATAPOINTS_URL ++ "_search?value=" ++ Uvalue ++ "&timestamp=" ++ Uvalue2),
114 |     check_returned_code(Response1, 200),
115 |     {ok, {_,_,A}} = Response1,
116 |     case re:run(A, "id\":\"[^\"]*", [{capture, first, list}]) of
117 |         {match, ["id\":\"" ++ Id]} -> Id;
118 |         nomatch -> {error, "no match"}
119 |     end.
120 | 
121 | 
122 | %% @doc
123 | %% Function: check_returned_code/2
124 | %% Purpose: Checks if the Response has the correct http return code
125 | %%
126 | %% @end
127 | -spec check_returned_code(string(), integer()) -> ok.
128 | check_returned_code(Response, Code) ->
129 |     {ok, Rest} = Response,
130 |     {Header,_,_} = Rest,
131 |     ?assertMatch({_, Code, _}, Header).
132 | 
133 | 
134 | %% @doc
135 | %% Function: get_non_existent_datapoint_test/0
136 | %% Purpose: Tests a get request for a datapoint that doesn't exist
137 | %% Returns: ok | {error, term()}
138 | %%
139 | %% @end
140 | -spec get_non_existent_datapoint_test() -> ok | {error, term()}.
141 | get_non_existent_datapoint_test() ->
142 |     Response1 = get_request(?DATAPOINTS_URL ++ "_search?_id=" ++ "nonexistent"),
143 |     {ok, Rest} = Response1,
144 |     {_,_,Result} = Rest,
145 |     ?assertNotEqual(0, string:str(Result, "data\":[]")).
146 | 
147 | 
148 | %% @doc
149 | %% Function: add_unsupported_field_test/0
150 | %% Purpose: Test that unsupported fields are not allowed to be added
151 | %%          on create
152 | %% Returns: ok | {error, term()}
153 | %% @end
154 | -spec add_unsupported_field_test() -> ok | {error, term()}.
155 | add_unsupported_field_test() ->
156 |     {ok, {{_Version1, 403, _ReasonPhrase1}, _Headers1, _Body1}} = httpc:request(post, {?WEBMACHINE_URL++"/streams/5/data", [],"application/json", "{\"test\":\"asdas\",\"value\" : 5.0}"}, [], []),
157 |     erlastic_search:delete_doc(?INDEX,"stream","5").
158 | 
159 | post_request(URL, ContentType, Body) -> request(post, {URL, [], ContentType, Body}).
160 | get_request(URL) -> request(get, {URL, []}).
161 | request(Method, Request) ->
162 |     httpc:request(Method, Request, [], []).
163 | 
164 | 
--------------------------------------------------------------------------------
/test/gen_virtual_stream_process_tests.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/test/gen_virtual_stream_process_tests.erl
--------------------------------------------------------------------------------
/test/http.erl:
--------------------------------------------------------------------------------
1 | %% @author Tommy Mattsson
2 | %%   [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 | %%
6 | %% @doc == http ==
7 | %% This module contains several helper functions for making HTTP requests
8 | %% against webmachine; it is used for example by the triggers tests.
9 | %%
10 | %% @end
11 | -module(http).
12 | -export([delete/1, get/1, post/2, put/2]).
13 | 
14 | %% ====================================================================
15 | %% API functions
16 | %% ====================================================================
17 | %% @doc
18 | %% Makes an HTTP DELETE request to a URL.
19 | %% @end
20 | -spec delete(Url::string()) -> {integer(), string()}.
21 | delete(Url) ->
22 |     {ok,{{_Vsn,Status,_Reason},_Hdrs,Body}} = httpc:request(delete,{Url,[]},[],[]),
23 |     {Status, Body}.
24 | 
25 | %% @doc
26 | %% Makes an HTTP GET request to a URL.
27 | %% @end
28 | -spec get(Url::string()) -> {integer(), string()}.
29 | get(Url) ->
30 |     {ok,{{_Vsn,Status,_Reason},_Hdrs,Body}} = httpc:request(get,{Url, []}, [], []),
31 |     {Status, Body}.
32 | 
33 | %% @doc
34 | %% Makes an HTTP POST request to a URL with the given JSON body.
35 | %% @end
36 | -spec post(Url::string(), Request::string()) -> {integer(), string()}.
37 | post(Url, Request) ->
38 |     {ok,{{_Vsn,Status,_Reason},_Hdrs,Body}} = httpc:request(post,{Url,[],"application/json",Request},[],[]),
39 |     {Status, Body}.
40 | 
41 | %% @doc
42 | %% Makes an HTTP PUT request to a URL with the given JSON body.
43 | %% @end
44 | -spec put(Url::string(), Request::string()) -> {integer(), string()}.
45 | put(Url, Request) ->
46 |     {ok,{{_Vsn,Status,_Reason},_Hdrs,Body}} = httpc:request(put,{Url,[],"application/json",Request},[],[]),
47 |     {Status, Body}.
48 | 
--------------------------------------------------------------------------------
/test/lib_json_tests.erl:
--------------------------------------------------------------------------------
1 | %% @author Tommy Mattsson
2 | %%   [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 | %%
6 | %% @doc == json library_tests ==
7 | %% This module contains several tests to test the functionality
8 | %% in the module lib_json for handling JSON objects.
9 | %%
10 | %% @end
11 | -module(lib_json_tests).
12 | -include_lib("eunit/include/eunit.hrl").
13 | -include("json.hrl").
14 | -export([]).
15 | 16 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 17 | %%% = = Test input = = 18 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 19 | -define(JSON1, 20 | "{" 21 | "\"friend\":[" 22 | "{\"name\":\"FriendName1\",\"nickname\":\"NickName1\"}," 23 | "{\"name\":\"FriendName2\",\"nickname\":[\"NickName2\",\"NickName3\"]}," 24 | "{\"name\":\"FriendName3\",\"nickname\":[\"NickName4\",\"NickName5\"]}" 25 | "]," 26 | "\"name\":\"Name1\"" 27 | "}"). 28 | 29 | -define(JSON2, 30 | "{" 31 | "\"name\":\"Name1\"," 32 | "\"friend\":{\"name\":\"FriendName2\",\"nickname\":[\"NickName2\",\"NickName3\"]}" 33 | "}"). 34 | 35 | -define(JSON3, 36 | "{\"took\":1,\"timed_out\":false,\"_shards\":{\"total\":5,\"successful\":5,\"failed\":0},\"hits\":{\"total\":0,\"max_score\":null,\"hits\":[]}}"). 37 | 38 | 39 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 40 | %%% = = Test desired input = = 41 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 42 | -define(JSON_RESULT1, 43 | "{" 44 | "\"friend\":[" 45 | "{\"name\":\"FriendName0\",\"nickname\":\"NickName0\"}," 46 | "{\"name\":\"FriendName1\",\"nickname\":\"NickName1\"}," 47 | "{\"name\":\"FriendName2\",\"nickname\":[\"NickName2\",\"NickName3\"]}," 48 | "{\"name\":\"FriendName3\",\"nickname\":[\"NickName4\",\"NickName5\"]}" 49 | "]," 50 | "\"name\":\"Name1\"" 51 | "}"). 52 | 53 | -define(JSON_RESULT2, 54 | "{" 55 | "\"friend\":[" 56 | "{\"name\":\"FriendName1\",\"nickname\":\"NickName1\"}," 57 | "{\"name\":\"FriendName2\",\"nickname\":[\"NickName2\",\"NickName3\",\"NickName6\"]}," 58 | "{\"name\":\"FriendName3\",\"nickname\":[\"NickName4\",\"NickName5\"]}" 59 | "]," 60 | "\"name\":\"Name1\"" 61 | "}"). 62 | 63 | -define(JSON_RESULT3, 64 | "{" 65 | "\"friend\":[" 66 | "{\"height\":180,\"name\":\"FriendName1\",\"nickname\":\"NickName1\"}," 67 | "{\"name\":\"FriendName2\",\"nickname\":[\"NickName2\",\"NickName3\"]}," 68 | "{\"name\":\"FriendName3\",\"nickname\":[\"NickName4\",\"NickName5\"]}" 69 | "]," 70 | "\"name\":\"Name1\"" 71 | "}"). 72 | -define(JSON_RESULT4, 73 | "{" 74 | "\"friend\":[" 75 | "{\"height\":[180,182],\"name\":\"FriendName1\",\"nickname\":\"NickName1\"}," 76 | "{\"name\":\"FriendName2\",\"nickname\":[\"NickName2\",\"NickName3\"]}," 77 | "{\"name\":\"FriendName3\",\"nickname\":[\"NickName4\",\"NickName5\"]}" 78 | "]," 79 | "\"name\":\"Name1\"" 80 | "}"). 81 | 82 | 83 | -define(JSON_RESULT5, 84 | ["{\"name\":\"FriendName1\",\"nickname\":\"NickName1\"}", 85 | "{\"name\":\"FriendName2\",\"nickname\":[\"NickName2\",\"NickName3\"]}", 86 | "{\"name\":\"FriendName3\",\"nickname\":[\"NickName4\",\"NickName5\"]}" 87 | ]). 88 | 89 | -define(JSON_RESULT6, 90 | "{\"name\":\"FriendName1\",\"nickname\":\"NickName1\"}" 91 | ). 92 | 93 | -define(JSON_RESULT7, 94 | "{\"name\":\"poff\",\"nickname\":\"NickName1\"}" 95 | ). 96 | 97 | -define(ENCODE_RESULT1, 98 | [$\{,[$\",<<"friend">>,$\"],$:,[$[, 99 | [${,[$\",<<"name">>,$\"],$:,[$\",<<"FriendName1">>,$\"],$,,[$\",<<"nickname">>,$\"],$:,[$\",<<"NickName1">>,$\"],$}],$,, 100 | [${,[$\",<<"name">>,$\"],$:,[$\",<<"FriendName2">>,$\"],$,,[$\",<<"nickname">>,$\"],$:,[$[,[$\",<<"NickName2">>,$\"],$,,[$\",<<"NickName3">>,$\"],$]],$}],$,, 101 | [${,[$\",<<"name">>,$\"],$:,[$\",<<"FriendName3">>,$\"],$,,[$\",<<"nickname">>,$\"],$:,[$[,[$\",<<"NickName4">>,$\"],$,,[$\",<<"NickName5">>,$\"],$]],$}] 102 | ,$]], 103 | $,,[$\", <<"name">>,$\"],$:,[$\",<<"Name1">>,$\"], 104 | $}]). 
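%% The expected encode/decode results above and below follow what appear to be
%% mochijson2-style representations (which lib_json seems to wrap): encode/1
%% yields an iolist of characters and binaries (?ENCODE_RESULT1), while
%% decode/1 yields {struct, Proplist} tuples with binary keys and values
%% (?DECODE_RESULT1). For example, decoding "{\"a\":1}" in this representation
%% gives {struct, [{<<"a">>, 1}]}.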
105 | 
106 | -define(DECODE_RESULT1,
107 |         {struct,[{<<"friend">>,
108 |                   [{struct,[{<<"name">>,<<"FriendName1">>},
109 |                             {<<"nickname">>,<<"NickName1">>}]},
110 |                    {struct,[{<<"name">>,<<"FriendName2">>},
111 |                             {<<"nickname">>,[<<"NickName2">>,<<"NickName3">>]}]},
112 |                    {struct,[{<<"name">>,<<"FriendName3">>},
113 |                             {<<"nickname">>,[<<"NickName4">>,<<"NickName5">>]}]}]},
114 |                  {<<"name">>,<<"Name1">>}]}).
115 | 
116 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
117 | %%% = = Test functions = =
118 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
119 | %% @doc
120 | %% Purpose: Tests lib_json:add_value/3
121 | %% @end
122 | add_value_test() ->
123 |     ?assertEqual("{\"attr1\":\"value1\"}", lib_json:add_value("{}", attr1, <<"value1">>)),
124 | 
125 |     %% A plain string value is not recognized as a JSON string, so string
126 |     %% values need to be given as binaries, as above
127 |     ?assertEqual("{\"attr1\":\"value1\"}", lib_json:add_value("{}", attr1, <<"value1">>)),
128 |     ?assertEqual(?JSON_RESULT1, lib_json:add_value(?JSON1, friend, "{\"name\":\"FriendName0\", \"nickname\":\"NickName0\"}")),
129 |     ?assertEqual(?JSON_RESULT2, lib_json:add_value(?JSON1, "friend[1].nickname", <<"NickName6">>)),
130 |     ?assertEqual(?JSON_RESULT3, lib_json:add_value(?JSON1, "friend[0].height", 180)),
131 |     ?assertEqual(?JSON_RESULT4, lib_json:add_value(?JSON1, "friend[0].height", "[180,182]")),
132 |     ?assertEqual(?JSON_RESULT4, lib_json:add_value(?JSON1, "friend[0].height", [180, 182])),
133 | 
134 |     %% If the field already exists and is not a list then no action is taken
135 |     ?assertEqual(?JSON1, lib_json:add_value(?JSON1, name, <<"poff">>)),
136 | 
137 |     ?assertEqual(?JSON1, lib_json:add_value(?JSON1, "name.poff", <<"poff">>)).
138 | 
139 | 
140 | add_values_test() ->
141 |     true.
142 | 
143 | %% @doc
144 | %% Purpose: Test lib_json:decode/1
145 | %% @end
146 | decode_test() ->
147 |     ?assertEqual(?DECODE_RESULT1, lib_json:decode(?JSON1)).
148 | 
149 | %% @doc
150 | %% Purpose: Test lib_json:encode/1
151 | %% @end
152 | encode_test() ->
153 |     ?assertEqual(?ENCODE_RESULT1, lib_json:encode(?JSON1)).
154 | 
155 | %% @doc
156 | %% Purpose: Test lib_json:encode/1 and lib_json:decode/1 in combination
157 | %% @end
158 | encode_decode_test() ->
159 |     ?assertEqual(?DECODE_RESULT1, lib_json:decode(lib_json:encode(?JSON1))),
160 |     ?assertEqual(?ENCODE_RESULT1, lib_json:encode(lib_json:decode(lib_json:encode(?JSON1)))).
161 | 
162 | %% @doc
163 | %% Purpose: Test lib_json:field_value_exists/3
164 | %% @end
165 | field_value_exists_test() ->
166 |     ?assertEqual(true, lib_json:field_value_exists(?JSON1, "friend[1].name", <<"FriendName2">>)),
167 |     ?assertEqual(false, lib_json:field_value_exists(?JSON1, "friend[0].name", <<"FriendName2">>)),
168 |     ?assertEqual(true, lib_json:field_value_exists(?JSON1, "friend[*].nickname", <<"NickName1">>)),
169 |     ?assertEqual(true, lib_json:field_value_exists(?JSON1, "friend[*].nickname", <<"NickName3">>)),
170 |     ?assertEqual(false, lib_json:field_value_exists(?JSON1, "friend[*].name", <<"NickName3">>)),
171 |     ?assertEqual(false, lib_json:field_value_exists(?JSON2, "friend[*].name", <<"NickName3">>)),
172 |     ?assertEqual(true, lib_json:field_value_exists(?JSON2, "friend.nickname", <<"NickName3">>)),
173 |     ?assertEqual(true, lib_json:field_value_exists(?JSON1, "friend[0]", ?JSON_RESULT6)),
174 |     ?assertEqual(true, lib_json:field_value_exists(?JSON1, "friend", ?JSON_RESULT5)).
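%% The field-path syntax exercised in these tests combines dotted keys with
%% list indexing: "friend[0].name" selects the name of the first friend, and
%% the wildcard "friend[*].nickname" matches against every element of the
%% friend list. Two examples taken from the assertions in this module:
%%
%%   lib_json:get_field(?JSON1, "friend[1].nickname[0]")
%%       %% -> <<"NickName2">>
%%   lib_json:field_value_exists(?JSON1, "friend[*].nickname", <<"NickName1">>)
%%       %% -> true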
175 | 
176 | %% @doc
177 | %% Purpose: Tests lib_json:get_field/2
178 | %% @end
179 | get_field_test() ->
180 |     ?assertEqual(<<"Name1">>, lib_json:get_field(?JSON1, "name")),
181 |     ?assertEqual(?JSON_RESULT5, lib_json:get_field(?JSON1, "friend")),
182 |     ?assertEqual(?JSON_RESULT6, lib_json:get_field(?JSON1, "friend[0]")),
183 |     ?assertEqual(<<"FriendName1">>, lib_json:get_field(?JSON1, "friend[0].name")),
184 |     ?assertEqual(<<"NickName1">>, lib_json:get_field(?JSON1, "friend[0].nickname")),
185 |     ?assertEqual([<<"NickName2">>, <<"NickName3">>], lib_json:get_field(?JSON1, "friend[1].nickname")),
186 |     ?assertEqual(<<"NickName2">>, lib_json:get_field(?JSON1, "friend[1].nickname[0]")),
187 |     ?assertEqual(undefined, lib_json:get_field(?JSON1, "friend[0].nick")),
188 |     ?assertEqual("{\"name\":\"FriendName2\",\"nickname\":[\"NickName2\",\"NickName3\"]}",
189 |                  lib_json:get_field(?JSON2, "friend")),
190 | 
191 |     AddedField1 = lib_json:add_value(?JSON1, "friend[0].height", [1,2]),
192 |     ?assertEqual([1,2], lib_json:get_field(AddedField1, "friend[0].height")),
193 |     AddedField2 = lib_json:add_value(?JSON1, "friend[0].height", ["value1","value2"]),
194 |     ?assertEqual([<<"value1">>,<<"value2">>], lib_json:get_field(AddedField2, "friend[0].height")).
195 | 
196 | %% @doc
197 | %% Purpose: Tests lib_json:get_field_value/3
198 | %% @end
199 | get_field_value_test() ->
200 |     ?assertEqual(<<"FriendName2">>, lib_json:get_field_value(?JSON1, "friend[1].name", <<"FriendName2">>)),
201 |     ?assertEqual(undefined, lib_json:get_field_value(?JSON1, "friend[0].name", <<"FriendName2">>)),
202 |     ?assertEqual(<<"NickName1">>, lib_json:get_field_value(?JSON1, "friend[*].nickname", <<"NickName1">>)),
203 |     ?assertEqual(<<"NickName3">>, lib_json:get_field_value(?JSON1, "friend[*].nickname", <<"NickName3">>)),
204 |     ?assertEqual(undefined, lib_json:get_field_value(?JSON1, "friend[*].name", <<"NickName3">>)),
205 |     ?assertEqual(undefined, lib_json:get_field_value(?JSON2, "friend[*].name", <<"NickName3">>)),
206 |     ?assertEqual(<<"NickName3">>, lib_json:get_field_value(?JSON2, "friend.nickname", <<"NickName3">>)),
207 |     ?assertEqual(?JSON_RESULT6, lib_json:get_field_value(?JSON1, "friend[0]", ?JSON_RESULT6)),
208 |     ?assertEqual(?JSON_RESULT5, lib_json:get_field_value(?JSON1, "friend", ?JSON_RESULT5)),
209 |     ?assertEqual(null, lib_json:get_field_value(?JSON3, "hits.max_score", null)),
210 | 
211 |     %% This call will produce an error. Added here as an example of how
212 |     %% lib_json:get_field_value/3 should NOT be used. The second argument is not
213 |     %% allowed to end with [*].
214 |     %% See lib_json:get_field_value/3 for details of how to use the function
215 |     Try = try lib_json:get_field_value(?JSON1, "friend[*]", "") of
216 |               _ -> will_not_happen
217 |           catch
218 |               _:_ -> error
219 |           end,
220 |     ?assertEqual(error, Try).
221 | 
222 | replace_field_test() ->
223 |     true.
224 | 
225 | rm_field_test() ->
226 |     true.
227 | 
228 | set_attr_test() ->
229 |     true.
230 | 
231 | set_attrs_test() ->
232 |     true.
233 | 
234 | to_string_test() ->
235 |     true.
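%% replace_field_test/0, rm_field_test/0, set_attr_test/0, set_attrs_test/0 and
%% to_string_test/0 above are placeholders that always pass. A possible shape
%% for the first two, assuming lib_json exposes replace_field(Json, Field,
%% Value) and rm_field(Json, Field) with the same path syntax as get_field/2
%% (an assumption -- check lib_json before enabling):
%%
%% replace_field_test() ->
%%     Replaced = lib_json:replace_field(?JSON1, "name", <<"Name2">>),
%%     ?assertEqual(<<"Name2">>, lib_json:get_field(Replaced, "name")).
%%
%% rm_field_test() ->
%%     Removed = lib_json:rm_field(?JSON1, "name"),
%%     ?assertEqual(undefined, lib_json:get_field(Removed, "name")).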
236 | 
--------------------------------------------------------------------------------
/test/poll_help_tests.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/test/poll_help_tests.erl
--------------------------------------------------------------------------------
/test/search_tests.erl:
--------------------------------------------------------------------------------
1 | %% @author Jose Arias, Andreas Moregård Haubenwaller
2 | %%   [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 | %%
6 | %% @doc == search_tests ==
7 | %% This module contains several tests to test the functionality
8 | %% in the module search, which is done by calling webmachine.
9 | %%
10 | %% @end
11 | 
12 | -module(search_tests).
13 | -include_lib("eunit/include/eunit.hrl").
14 | -export([]).
15 | 
16 | -define(WEBMACHINE_URL, api_help:get_webmachine_url()).
17 | -define(ELASTIC_SEARCH_URL, api_help:get_webmachine_url()).
18 | 
19 | %% @doc
20 | %% Function: init_test/0
21 | %% Purpose: Used to start inets to be able to do HTTP requests
22 | %% Returns: ok | {error, term()}
23 | %%
24 | %% Side effects: Start inets
25 | %% @end
26 | -spec init_test() -> ok | {error, term()}.
27 | 
28 | init_test() ->
29 |     inets:start().
30 | 
31 | %% @doc
32 | %% Function: get_search_test/0
33 | %% Purpose: Test the get_search function by doing some HTTP requests
34 | %% Returns: ok | {error, term()}
35 | %% @end
36 | get_search_test() ->
37 |     {ok, {{_Version1, 405, _ReasonPhrase1}, _Headers1, _Body1}} = httpc:request(get, {?WEBMACHINE_URL++"/_history", []}, [], []),
38 |     {ok, {{_Version2, 501, _ReasonPhrase2}, _Headers2, _Body2}} = httpc:request(get, {?WEBMACHINE_URL++"/_search", []}, [], []),
39 |     {ok, {{_Version3, 200, _ReasonPhrase3}, _Headers3, Body3}} = httpc:request(get, {?WEBMACHINE_URL++"/_history?stream_id=id_that_doesnt_exist", []}, [], []),
40 |     ?assertEqual([],lib_json:get_field(Body3,"history[0].data")).
41 | 
42 | 
43 | 
44 | %% @doc
45 | %% Function: process_search_post_test/0
46 | %% Purpose: Test POSTing searches by doing some HTTP requests
47 | %% Returns: ok | {error, term()}
48 | %% @end
49 | process_search_post_test() ->
50 |     _Body = case httpc:request(post, {?WEBMACHINE_URL++"/users", [],"application/json", "{\"username\" : \"search\"}"}, [], []) of
51 |                 {ok, {{_Version, 200, _ReasonPhrase}, _Headers, BodyUser}} -> %% Created unique user
52 |                     BodyUser;
53 |                 {ok, {{_Version, 409, _ReasonPhrase}, _Headers, _BodyUser}} -> %% Tried to create user with same username
54 |                     {ok, {{_, 200, _}, _, BodyUser2}} = httpc:request(get, {"http://localhost:8000/users/" ++ "search", []}, [], []),
55 |                     BodyUser2
56 |             end,
57 |     api_help:refresh(),
58 |     {ok, {{_Version1, 200, _ReasonPhrase1}, _Headers1, Body1}} = httpc:request(post, {?WEBMACHINE_URL++"/streams", [],"application/json", "{\"name\" : \"search\", \"min_val\": \"0.0\" , \"max_val\": \"1.0\", \"user_id\" : \"search\", \"private\" : \"false\"}"}, [], []),
59 |     {ok, {{_Version2, 200, _ReasonPhrase2}, _Headers2, Body2}} = httpc:request(post, {?WEBMACHINE_URL++"/streams", [],"application/json", "{\"name\" : \"search\", \"min_val\": \"0.0\" , \"max_val\": \"1.0\", \"user_id\" : \"search\", \"private\" : \"true\"}"}, [], []),
60 |     DocId1 = lib_json:get_field(Body1,"_id"),
61 |     DocId2 = lib_json:get_field(Body2,"_id"),
62 |     api_help:refresh(),
63 |     {ok, {{_Version3, 200, _ReasonPhrase3}, _Headers3, Body3}} = httpc:request(post, {?WEBMACHINE_URL++"/_search", [],"application/json", "{\"query\":{\"match_all\":{}}}"}, [], []),
64 |     {ok, {{_Version8, 200, _ReasonPhrase8}, _Headers8, _Body8}} = httpc:request(delete, {?WEBMACHINE_URL++"/streams/" ++ lib_json:to_string(DocId1), []}, [], []),
65 |     {ok, {{_Version9, 200, _ReasonPhrase9}, _Headers9, _Body9}} = httpc:request(delete, {?WEBMACHINE_URL++"/streams/" ++ lib_json:to_string(DocId2), []}, [], []),
66 |     {ok, {{_Version10, 200, _ReasonPhrase10}, _Headers10, _Body10}} = httpc:request(delete, {?WEBMACHINE_URL++"/users/search", []}, [], []),
67 |     ?assertEqual(true,lib_json:get_field(Body3,"streams.hits.total") >= 1).
--------------------------------------------------------------------------------
/test/test.erl:
--------------------------------------------------------------------------------
1 | %% @author Tommy Mattsson
2 | %%   [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 | 
6 | %% @doc Test wrapper module
7 | -module(test).
8 | -author('Tommy Mattsson').
9 | -export([run/0, run/1]).
10 | 
11 | -define(WEBMACHINE_URL, api_help:get_webmachine_url()).
12 | -define(RESOURCE_URL, ?WEBMACHINE_URL++"/resources/").
13 | %% @doc
14 | %% Function: run/0
15 | %% Purpose: Wrapper function for testing in order to be able to return a
16 | %%          non-zero exit code when one or more test cases fail.
17 | %%          This is for getting tests to work with Travis CI.
18 | %% Returns: ok | no_return()
19 | %% @end
20 | run() ->
21 |     run("ebin").
22 | 
23 | 
24 | post_request(URL, ContentType, Body) -> request(post, {URL, [], ContentType, Body}).
25 | request(Method, Request) ->
26 |     httpc:request(Method, Request, [], []).
27 | 
28 | run(Suite) ->
29 |     Result = eunit:test(Suite,
30 |                         [verbose,
31 |                          {cover_enabled, true},
32 |                          {report, {eunit_surefire, [{dir, "test-results"}]}}
33 |                         ]),
34 |     case Result of
35 |         ok ->
36 |             init:stop();
37 |         error ->
38 |             halt(1)
39 |     end.
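%% A typical way to invoke this wrapper from a shell (a sketch, assuming the
%% compiled beams live in ebin/ and the dependencies in lib/*/ebin/):
%%
%%   erl -noshell -pa ebin lib/*/ebin -s test run
%%
%% eunit:test/2 is handed the "ebin" directory, so the compiled *_tests
%% modules found there are run; surefire-style XML reports are written to
%% test-results/, and run/1 maps the eunit result onto the process exit code
%% (init:stop/0 on ok, halt(1) on error) so that CI fails when a test fails.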
40 | 
--------------------------------------------------------------------------------
/test/triggers_lib_tests.erl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonResearch/iot-framework-engine/106636d2f3b43698ae8c07bbe29d7d25c3e7383e/test/triggers_lib_tests.erl
--------------------------------------------------------------------------------
/test/vs_func_lib_tests.erl:
--------------------------------------------------------------------------------
1 | %% @author Tomas Sävström
2 | %%   [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 | %%
6 | %% @doc == vs_func_lib_tests ==
7 | %% This module contains several tests to test the functionality
8 | %% of the data functions in the module vs_func_lib
9 | %%
10 | %% @end
11 | -module(vs_func_lib_tests).
12 | 
13 | %% ====================================================================
14 | %% API functions
15 | %% ====================================================================
16 | -export([]).
17 | -include_lib("eunit/include/eunit.hrl").
18 | 
19 | 
20 | %% ====================================================================
21 | %% Internal functions
22 | %% ====================================================================
23 | 
24 | 
25 | %% @doc
26 | %% Function: min_test/0
27 | %% Purpose: Used to test the min function
28 | %% Returns: ok | {error, term()}
29 | %%
30 | %% @end
31 | -spec min_test() -> ok | {error, term()}.
32 | 
33 | min_test() ->
34 |     Data = [["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":5.0}",
35 |              "{\"timestamp\":\"2014-11-21T10:16:56.000\",\"value\":4.0}",
36 |              "{\"timestamp\":\"2014-11-21T11:15:56.000\",\"value\":3.0}",
37 |              "{\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":2.0}",
38 |              "{\"timestamp\":\"2014-11-24T10:15:56.000\",\"value\":1.0}"],
39 |             ["{\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}",
40 |              "{\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":-4.0}",
41 |              "{\"timestamp\":\"2014-11-25T10:15:56.000\",\"value\":-3.0}",
42 |              "{\"timestamp\":\"2014-11-21T10:16:56.000\",\"value\":-2.0}",
43 |              "{\"timestamp\":\"2014-11-21T10:15:59.000\",\"value\":-1.0}"],
44 |             ["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":0.0}"],
45 |             ["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":5.0}",
46 |              "{\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}"]],
47 |     Result = vs_func_lib:min(Data,<<"adjkkvcj--sdffs">>),
48 |     ?assertEqual(["{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}",
49 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":0.0}",
50 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}",
51 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":1.0}"],Result).
52 | 
53 | %% @doc
54 | %% Function: max_test/0
55 | %% Purpose: Used to test the max function
56 | %% Returns: ok | {error, term()}
57 | %%
58 | %% @end
59 | -spec max_test() -> ok | {error, term()}.
60 | 
61 | max_test() ->
62 |     Data = [["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":5.0}",
63 |              "{\"timestamp\":\"2014-11-21T10:16:56.000\",\"value\":4.0}",
64 |              "{\"timestamp\":\"2014-11-21T11:15:56.000\",\"value\":3.0}",
65 |              "{\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":2.0}",
66 |              "{\"timestamp\":\"2014-11-24T10:15:56.000\",\"value\":1.0}"],
67 |             ["{\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}",
68 |              "{\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":-4.0}",
69 |              "{\"timestamp\":\"2014-11-25T10:15:56.000\",\"value\":-3.0}",
70 |              "{\"timestamp\":\"2014-11-21T10:16:56.000\",\"value\":-2.0}",
71 |              "{\"timestamp\":\"2014-11-21T10:15:59.000\",\"value\":-1.0}"],
72 |             ["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":0.0}"],
73 |             ["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":5.0}",
74 |              "{\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}"]],
75 |     Result = vs_func_lib:max(Data,<<"adjkkvcj--sdffs">>),
76 |     ?assertEqual(["{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":5.0}",
77 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":0.0}",
78 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-1.0}",
79 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":5.0}"],Result).
80 | 
81 | %% @doc
82 | %% Function: avg_test/0
83 | %% Purpose: Used to test the avg (mean) function
84 | %% Returns: ok | {error, term()}
85 | %%
86 | %% @end
87 | -spec avg_test() -> ok | {error, term()}.
88 | 
89 | avg_test() ->
90 |     Data = [["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":5.0}",
91 |              "{\"timestamp\":\"2014-11-21T10:16:56.000\",\"value\":4.0}",
92 |              "{\"timestamp\":\"2014-11-21T11:15:56.000\",\"value\":3.0}",
93 |              "{\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":2.0}",
94 |              "{\"timestamp\":\"2014-11-24T10:15:56.000\",\"value\":1.0}"],
95 |             ["{\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}",
96 |              "{\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":-4.0}",
97 |              "{\"timestamp\":\"2014-11-25T10:15:56.000\",\"value\":-3.0}",
98 |              "{\"timestamp\":\"2014-11-21T10:16:56.000\",\"value\":-2.0}",
99 |              "{\"timestamp\":\"2014-11-21T10:15:59.000\",\"value\":-1.0}"],
100 |             ["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":0.0}"],
101 |             ["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":5.0}",
102 |              "{\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}"]],
103 |     Result = vs_func_lib:mean(Data,<<"adjkkvcj--sdffs">>),
104 |     ?assertEqual(["{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":0.0}",
105 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":0.0}",
106 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-3.0}",
107 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":3.0}"],Result).
108 | 
109 | %% @doc
110 | %% Function: sum_test/0
111 | %% Purpose: Used to test the total (sum) function
112 | %% Returns: ok | {error, term()}
113 | %%
114 | %% @end
115 | -spec sum_test() -> ok | {error, term()}.
116 | 
117 | sum_test() ->
118 |     Data = [["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":5.0}",
119 |              "{\"timestamp\":\"2014-11-21T10:16:56.000\",\"value\":4.0}",
120 |              "{\"timestamp\":\"2014-11-21T11:15:56.000\",\"value\":3.0}",
121 |              "{\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":2.0}",
122 |              "{\"timestamp\":\"2014-11-24T10:15:56.000\",\"value\":1.0}"],
123 |             ["{\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}",
124 |              "{\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":-4.0}",
125 |              "{\"timestamp\":\"2014-11-25T10:15:56.000\",\"value\":-3.0}",
126 |              "{\"timestamp\":\"2014-11-21T10:16:56.000\",\"value\":-2.0}",
127 |              "{\"timestamp\":\"2014-11-21T10:15:59.000\",\"value\":-1.0}"],
128 |             ["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":0.0}"],
129 |             ["{\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":5.0}",
130 |              "{\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-5.0}"]],
131 |     Result = vs_func_lib:total(Data,<<"adjkkvcj--sdffs">>),
132 |     ?assertEqual(["{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":0.0}",
133 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2014-11-21T10:15:56.000\",\"value\":0.0}",
134 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2015-11-21T10:15:56.000\",\"value\":-15.0}",
135 |                   "{\"stream_id\":\"adjkkvcj--sdffs\",\"timestamp\":\"2014-12-21T10:15:56.000\",\"value\":15.0}"],Result).
--------------------------------------------------------------------------------
/test/vstreams_tests.erl:
--------------------------------------------------------------------------------
1 | %% @author Iakovos Koutsoumpakis
2 | %%   [www.csproj13.student.it.uu.se]
3 | %% @version 1.0
4 | %% @copyright [Copyright information]
5 | %%
6 | %% @doc == vstreams_tests ==
7 | %% This module contains several tests to test
8 | %% the virtual streams functionality.
9 | %%
10 | %% @end
11 | 
12 | -module(vstreams_tests).
13 | -include_lib("eunit/include/eunit.hrl").
14 | 
15 | -define(WEBMACHINE_URL, api_help:get_webmachine_url()).
16 | -define(STREAMS_URL, ?WEBMACHINE_URL ++ "/streams/").
17 | -define(VSTREAMS_URL, ?WEBMACHINE_URL ++ "/vstreams/").
18 | -define(VSDATAPOINTS_URL, ?WEBMACHINE_URL ++ "/data/").
19 | -define(TEST_VALUE, "1").
20 | -define(INDEX, "sensorcloud").
21 | 
22 | 
23 | %% @doc
24 | %% Function: init_test/0
25 | %% Purpose: Used to start inets to be able to do HTTP requests
26 | %% Returns: ok | {error, term()}
27 | %%
28 | %% Side effects: Start inets
29 | %% @end
30 | -spec init_test() -> ok | {error, term()}.
31 | 
32 | init_test() ->
33 |     inets:start().
34 | 
35 | 
36 | %% @doc
37 | %% Function: post_test/0
38 | %% Purpose: Test a post request
39 | %% Returns: ok | {error, term()}
40 | %%
41 | %% @end
42 | -spec post_test() -> ok | {error, term()}.
43 | post_test() ->
44 |     {ok, {{_Version, 200, _ReasonPhrase}, _Headers, Body1}} = httpc:request(post, {?WEBMACHINE_URL++"/users", [],"application/json", "{\"username\" : \"vstreamuser\"}"}, [], []),
45 |     UserId = lib_json:get_field(Body1,"_id"),
46 |     api_help:refresh(),
47 |     {ok, {{_Version2, 200, _ReasonPhrase2}, _Headers2, Body2}} = post_request(?STREAMS_URL, "application/json", "{ \"name\" : \"teststream1\", \"min_val\": \"0.0\" , \"max_val\": \"1.0\", \"user_id\" : \"" ++ lib_json:to_string(UserId) ++ "\" }"),
48 |     Streamid1 = lib_json:get_field(Body2, "_id"),
49 |     api_help:refresh(),
50 |     {ok, {{_Version3, 200, _ReasonPhrase3}, _Headers3, Body3}} = post_request(?STREAMS_URL, "application/json", "{ \"name\" : \"teststream2\", \"min_val\": \"0.0\" , \"max_val\": \"1.0\", \"user_id\" : \"" ++ lib_json:to_string(UserId) ++ "\" }"),
51 |     Streamid2 = lib_json:get_field(Body3, "_id"),
52 |     api_help:refresh(),
53 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid1) ++ "/data", "application/json", "{ \"value\":1}"),
54 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid1) ++ "/data", "application/json", "{ \"value\":1}"),
55 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid1) ++ "/data", "application/json", "{ \"value\":1}"),
56 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid1) ++ "/data", "application/json", "{ \"value\":1}"),
57 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid1) ++ "/data", "application/json", "{ \"value\":1}"),
58 | 
59 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid2) ++ "/data", "application/json", "{ \"value\":2}"),
60 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid2) ++ "/data", "application/json", "{ \"value\":2}"),
61 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid2) ++ "/data", "application/json", "{ \"value\":2}"),
62 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid2) ++ "/data", "application/json", "{ \"value\":2}"),
63 |     post_request(?STREAMS_URL ++ lib_json:to_string(Streamid2) ++ "/data", "application/json", "{ \"value\":2}"),
64 |     api_help:refresh(),
65 |     Response3 = post_request(?VSTREAMS_URL, "application/json", "{\"user_id\" : \"" ++ lib_json:to_string(UserId) ++ "\", \"name\" : \"post_testvstream1\", \"description\" : \"test\",
66 |         \"streams_involved\" : [\"" ++ lib_json:to_string(Streamid1) ++ "\", \"" ++ lib_json:to_string(Streamid2) ++ "\"],
67 |         \"timestampfrom\" : \"now-1h\", \"function\" : [\"mean\", \"1s\"]}"),
68 |     check_returned_code(Response3, 200),
69 |     api_help:refresh().
70 | 
71 | 
72 | 
73 | %% @doc
74 | %% Function: get_vstream_test/0
75 | %% Purpose: Test the get_vstream function by doing some HTTP requests
76 | %% Returns: ok | {error, term()}
77 | %%
78 | %% Side effects: none; reads the vstream created by post_test
79 | %% @end
80 | -spec get_vstream_test() -> ok | {error, term()}.
81 | 
82 | get_vstream_test() ->
83 |     {ok, {{_Version1, 200, _ReasonPhrase1}, _Headers1, Body1}} = httpc:request(post, {?WEBMACHINE_URL++"/vstreams/_search",[],"application/json", "{\"query\":{\"term\" : { \"name\" : \"post_testvstream1\"}}}"}, [], []),
84 |     StreamId = lib_json:get_field(Body1,"hits[0].id"),
85 |     {ok, {{_Version2, 200, _ReasonPhrase2}, _Headers2, _Body2}} = httpc:request(get, {?WEBMACHINE_URL++"/vstreams/" ++ lib_json:to_string(StreamId), []}, [], [])
86 |     .
87 | 
88 | 
89 | %% @doc
90 | %% Function: put_vstream_test/0
91 | %% Purpose: Test the put_vstream function by doing some HTTP requests
92 | %% Returns: ok | {error, term()}
93 | %%
94 | %% Side effects: creates 2 documents in elasticsearch and updates them
95 | %% @end
96 | -spec put_vstream_test() -> ok | {error, term()}.
97 | put_vstream_test() ->
98 |     {ok, {{_Version1, 200, _ReasonPhrase1}, _Headers1, Body1}} = httpc:request(post, {?WEBMACHINE_URL++"/vstreams/_search",[],"application/json", "{\"query\":{\"term\" : { \"name\" : \"post_testvstream1\"}}}"}, [], []),
99 |     StreamId = lib_json:get_field(Body1,"hits[0].id"),
100 |     {ok, {{_Version2, 200, _ReasonPhrase2}, _Headers2, _Body2}} = httpc:request(put, {?WEBMACHINE_URL++"/vstreams/" ++ lib_json:to_string(StreamId), [], "application/json", "{\n\"name\" : \"updated_testvstream1\"}"}, [], []),
101 |     api_help:refresh(),
102 |     {ok, {{_Version3, 200, _ReasonPhrase3}, _Headers3, Body3}} = httpc:request(post, {?WEBMACHINE_URL++"/vstreams/_search",[],"application/json", "{\"query\":{\"term\" : { \"name\" : \"post_testvstream1\"}}}"}, [], []),
103 |     ?assertEqual(length(lib_json:get_field(lib_json:to_string(Body3), "hits")), 0),
104 |     {ok, {{_Version4, 200, _ReasonPhrase4}, _Headers4, Body4}} = httpc:request(post, {?WEBMACHINE_URL++"/vstreams/_search",[],"application/json", "{\"query\":{\"term\" : { \"name\" : \"updated_testvstream1\"}}}"}, [], []),
105 |     ?assertEqual(length(lib_json:get_field(lib_json:to_string(Body4), "hits")), 1)
106 |     .
107 | 
108 | 
109 | %% @doc
110 | %% Function: delete_vstream_test/0
111 | %% Purpose: Test the delete_vstream function by doing some HTTP requests
112 | %% Returns: ok | {error, term()}
113 | %%
114 | %% Side effects: creates 2 documents in elasticsearch and deletes them
115 | %% @end
116 | -spec delete_vstream_test() -> ok | {error, term()}.
117 | 
118 | delete_vstream_test() ->
119 |     %delete vstream, make sure there are no vsdatapoints left
120 |     {ok, {{_Version1, 200, _ReasonPhrase1}, _Headers1, Body1}} = httpc:request(post, {?WEBMACHINE_URL++"/vstreams/_search",[],"application/json", "{\"query\":{\"term\" : { \"name\" : \"updated_testvstream1\"}}}"}, [], []),
121 |     StreamId = lib_json:get_field(Body1,"hits[0].id"),
122 |     {ok, {{_Version2, 200, _ReasonPhrase2}, _Headers2, _Body2}} = httpc:request(delete, {?WEBMACHINE_URL++"/vstreams/" ++ lib_json:to_string(StreamId), []}, [], []),
123 |     api_help:refresh(),
124 |     {ok, {{_Version3, 200, _ReasonPhrase3}, _Headers3, Body3}} = httpc:request(post, {?WEBMACHINE_URL++"/vstreams/_search",[],"application/json", "{\"query\":{\"term\" : { \"name\" : \"updated_testvstream1\"}}}"}, [], []),
125 |     ?assertEqual(length(lib_json:get_field(lib_json:to_string(Body3), "hits")), 0),
126 |     {ok, {{_Version4, 200, _ReasonPhrase4}, _Headers4, Body4}} = httpc:request(get, {?WEBMACHINE_URL++"/vstreams/" ++ lib_json:to_string(StreamId) ++ "/data/_search", []}, [], []),
127 |     ?assertEqual(length(lib_json:get_field(lib_json:to_string(Body4), "data")), 0),
128 |     api_help:refresh(),
129 |     {ok, {{_Version6, 200, _ReasonPhrase6}, _Headers6, _Body6}} = httpc:request(delete, {?WEBMACHINE_URL++"/users/vstreamuser", []}, [], [])
130 |     .
131 | 
132 | 
133 | %% @doc
134 | %% Checks if the Response has the correct http return code
135 | %% @end
136 | -spec check_returned_code(string(), integer()) -> ok.
137 | check_returned_code(Response, Code) ->
138 |     {ok, Rest} = Response,
139 |     {Header,_,_} = Rest,
140 |     ?assertMatch({_, Code, _}, Header).
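%% The api_help:refresh() calls interleaved with the requests above appear to
%% force an Elasticsearch index refresh: ES search is only near-real-time, so
%% without the refresh a freshly indexed or deleted vstream might not yet be
%% visible to the immediately following _search request.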
141 | 142 | 143 | post_request(URL, ContentType, Body) -> request(post, {URL, [], ContentType, Body}). 144 | get_request(URL) -> request(get, {URL, []}). 145 | request(Method, Request) -> 146 | httpc:request(Method, Request, [], []). 147 | --------------------------------------------------------------------------------