├── .gitignore
├── README.md
├── computer-vision
│   ├── README.md
│   ├── classify-image
│   │   └── template.yml
│   ├── data
│   │   └── sample.jpeg
│   └── detect-object
│       └── template.yml
├── log-processing
│   ├── Makefile
│   ├── README.md
│   ├── anonymize-log
│   │   ├── code
│   │   │   ├── Cargo.toml
│   │   │   └── src
│   │   │       └── lib.rs
│   │   └── func
│   │       ├── Makefile
│   │       └── template.yml
│   ├── data
│   │   └── sample-logs.log
│   ├── filter-log
│   │   ├── code
│   │   │   ├── Cargo.toml
│   │   │   └── src
│   │   │       └── lib.rs
│   │   └── func
│   │       ├── Makefile
│   │       └── template.yml
│   └── runtime
│       ├── README.md
│       ├── bootstrap
│       │   ├── .cargo
│       │   │   └── config.toml
│       │   ├── Cargo.toml
│       │   └── src
│       │       └── main.rs
│       └── build.Dockerfile
├── media-processing
│   ├── README.md
│   ├── convert-audio
│   │   ├── ffmpeg
│   │   └── index.py
│   ├── data
│   │   └── video.mp4
│   ├── get-media-meta
│   │   ├── ffprobe
│   │   └── index.py
│   └── template.yml
├── smart-manufacturing
│   ├── README.md
│   ├── build.Dockerfile
│   ├── create_tb.sql
│   ├── detect-anomaly
│   │   ├── CMakeLists.txt
│   │   ├── Dockerfile
│   │   ├── function.hpp
│   │   ├── json.hpp
│   │   ├── main.cpp
│   │   └── template.yaml
│   ├── ingest-data
│   │   ├── CMakeLists.txt
│   │   ├── Dockerfile
│   │   ├── function.hpp
│   │   ├── json.hpp
│   │   ├── main.cpp
│   │   └── template.yaml
│   └── runtime.Dockerfile
└── smart-parking
    ├── README.md
    ├── query-vacancy
    │   ├── code
    │   │   ├── handler.js
    │   │   ├── index.js
    │   │   └── package.json
    │   └── template.yml
    └── reserve-spot
        ├── code
        │   ├── handler.js
        │   ├── index.js
        │   └── package.json
        └── template.yml
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by https://www.toptal.com/developers/gitignore/api/macos,sublimetext
2 | # Edit at https://www.toptal.com/developers/gitignore?templates=macos,sublimetext
3 |
4 | ### macOS ###
5 | # General
6 | .DS_Store
7 | .AppleDouble
8 | .LSOverride
9 |
10 | # Icon must end with two \r
11 | Icon
12 |
13 |
14 | # Thumbnails
15 | ._*
16 |
17 | # Files that might appear in the root of a volume
18 | .DocumentRevisions-V100
19 | .fseventsd
20 | .Spotlight-V100
21 | .TemporaryItems
22 | .Trashes
23 | .VolumeIcon.icns
24 | .com.apple.timemachine.donotpresent
25 |
26 | # Directories potentially created on remote AFP share
27 | .AppleDB
28 | .AppleDesktop
29 | Network Trash Folder
30 | Temporary Items
31 | .apdisk
32 |
33 | ### macOS Patch ###
34 | # iCloud generated files
35 | *.icloud
36 |
37 | ### SublimeText ###
38 | # Cache files for Sublime Text
39 | *.tmlanguage.cache
40 | *.tmPreferences.cache
41 | *.stTheme.cache
42 |
43 | # Workspace files are user-specific
44 | *.sublime-workspace
45 |
46 | # Project files should be checked into the repository, unless a significant
47 | # proportion of contributors will probably not be using Sublime Text
48 | # *.sublime-project
49 |
50 | # SFTP configuration file
51 | sftp-config.json
52 | sftp-config-alt*.json
53 |
54 | # Package control specific files
55 | Package Control.last-run
56 | Package Control.ca-list
57 | Package Control.ca-bundle
58 | Package Control.system-ca-bundle
59 | Package Control.cache/
60 | Package Control.ca-certs/
61 | Package Control.merged-ca-bundle
62 | Package Control.user-ca-bundle
63 | oscrypto-ca-bundle.crt
64 | bh_unicode_properties.cache
65 |
66 | # Sublime-github package stores a github token in this file
67 | # https://packagecontrol.io/packages/sublime-github
68 | GitHub.sublime-settings
69 |
70 | # End of https://www.toptal.com/developers/gitignore/api/macos,sublimetext
71 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | ## FaaS Scheduling Benchmark
3 |
4 | This benchmark suite is designed for evaluating the scheduler in a Function-as-a-Service platform. It consists of functions from various application domains so as to simulate the mixed workload in production.
5 |
6 | ### Prerequisite
7 |
8 | If you want to deploy and run the functions, you need to ...
9 |
10 | 1. ... have an [Alibaba Cloud](https://us.alibabacloud.com/) account because the functions are implemented for [Function Compute](https://www.alibabacloud.com/product/function-compute).
11 |
 12 | 2. ... know how to use the [`fun` CLI](https://github.com/alibaba/funcraft) tool. If not, you could check out this [Get Started](https://github.com/alibaba/funcraft/blob/master/docs/usage/getting_started.md) guide; a minimal setup sketch is shown below.
13 |
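For reference, a minimal CLI setup might look like the following. This is only a sketch, assuming the npm-based installation described in the funcraft Get Started guide; adjust it to your environment.

```
# install the funcraft CLI (requires Node.js and npm)
npm install @alicloud/fun -g

# interactively configure your account ID, access key and default region
fun config
```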
14 |
15 | ### Repo Structure
16 |
 17 | The benchmark suite has five applications, each with two functions. You could follow the README in each application directory to deploy and invoke the functions.
18 |
19 |
 20 | | Application | Function | Programming Language | Dependencies |
 21 | | --- | --- | --- | --- |
 22 | | Smart Parking | Query Vacancy | JavaScript | Redis |
 23 | | Smart Parking | Reserve Spot | JavaScript | Redis, Kafka |
 24 | | Log Processing | Anonymize Log | Rust | Kafka |
 25 | | Log Processing | Filter Log | Rust | Kafka |
 26 | | Computer Vision | Detect Object | Python | TensorFlow |
 27 | | Computer Vision | Classify Image | Python | TensorFlow |
 28 | | Media Processing | Get Media Meta | Python | OSS |
 29 | | Media Processing | Convert Audio | Python | OSS |
 30 | | Smart Manufacturing | Ingest Data | C++ | MySQL |
 31 | | Smart Manufacturing | Detect Anomaly | C++ | MySQL |
 32 |
87 | ### Portability
88 |
 89 | The benchmark suite only supports [Function Compute](https://www.alibabacloud.com/product/function-compute). To port it to other FaaS platforms, you have to change how arguments are passed into the functions and how the functions are deployed.
90 |
91 |
92 | ### Citation
93 |
94 | If you have used this benchmark in your research project, please cite the following paper.
95 |
96 | ```
97 | @inproceedings{tian2022owl,
98 | title={Owl: Performance-Aware Scheduling for Resource-Efficient Function-as-a-Service Cloud},
99 | author={Tian, Huangshi and Li, Suyi and Wang, Ao and Wang, Wei and Wu, Tianlong and Yang, Haoran},
100 | booktitle={Proceedings of the ACM Symposium on Cloud Computing 2022},
101 | year={2022}
102 | }
103 | ```
104 |
105 | ### Acknowledgement
106 |
107 | [@SimonZYC](https://github.com/SimonZYC) has contributed to this benchmark.
108 |
--------------------------------------------------------------------------------
/computer-vision/README.md:
--------------------------------------------------------------------------------
1 |
2 | ## Computer Vision Application
3 |
 4 | This application mimics a CV company that provides image processing services through an API.
 5 | - The [Classify Image](#classify-image-function) function classifies an image into a certain category.
6 | - The [Detect Object](#detect-object-function) function detects whether an image contains a certain object.
7 |
8 | ### Classify Image Function
9 |
10 | To deploy a function,
11 |
12 | ```
13 | cd faas-scheduling-benchmark/computer-vision/classify-image
14 | fun deploy -y
15 | ```
16 |
17 | To invoke a function,
18 |
19 | ```
 20 | # The <endpoint> can be found in the result printed out by `fun deploy`.
 21 | curl -s -i http://<endpoint>/2016-08-15/proxy/computer-vision/classify-image/ \
22 | -X POST \
23 | -d 'https://github.com/All-less/faas-scheduling-benchmark/raw/master/computer-vision/data/sample.jpeg'
24 | ```
25 |
26 | ### Detect Object Function
27 |
28 | To deploy a function,
29 |
30 | ```
31 | cd faas-scheduling-benchmark/computer-vision/detect-object
32 | fun deploy -y
33 | ```
34 |
35 | To invoke a function,
36 |
37 | ```
 38 | # The <endpoint> can be found in the result printed out by `fun deploy`.
 39 | curl -s -X POST -i http://<endpoint>/2016-08-15/proxy/computer-vision/detect-object/
40 |
41 | # Currently the detect-object function does not accept custom images. It comes with a set
42 | # of images and every time the function is invoked, it will randomly pick one and run the
43 | # detection algorithm on that image.
44 | ```
45 |
--------------------------------------------------------------------------------
/computer-vision/classify-image/template.yml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | computer-vision:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
7 | Policies:
8 | - AliyunContainerRegistryReadOnlyAccess
9 | - AliyunLogFullAccess
10 | InternetAccess: true
11 |
12 | classify-image:
13 | Type: 'Aliyun::Serverless::Function'
14 | Properties:
15 | Runtime: custom-container
16 | Timeout: 180
17 | CAPort: 8080
18 | Handler: not-used
19 | MemorySize: 3072
20 | CodeUri: ./
21 | CustomContainerConfig:
22 | Image: 'docker.io/allless/classify-image:v0.1'
23 | Command: '[ "gunicorn" ]'
24 | Args: '[ "-w", "1", "--threads", "10", "-b", "0.0.0.0:8080", "--keep-alive", "900", "server:app" ]'
25 | Events:
26 | http-trigger:
27 | Type: HTTP
28 | Properties:
29 | AuthType: ANONYMOUS
30 | Methods: ['GET', 'POST', 'PUT']
31 |
--------------------------------------------------------------------------------
/computer-vision/data/sample.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/All-less/faas-scheduling-benchmark/84ca34c6fd85ab10b497abfb584a4c7cb38801b5/computer-vision/data/sample.jpeg
--------------------------------------------------------------------------------
/computer-vision/detect-object/template.yml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | computer-vision:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
7 | Policies:
8 | - AliyunContainerRegistryReadOnlyAccess
9 | - AliyunLogFullAccess
10 | InternetAccess: true
11 |
12 | detect-object:
13 | Type: 'Aliyun::Serverless::Function'
14 | Properties:
15 | Runtime: custom-container
16 | Timeout: 300
17 | CAPort: 8080
18 | Handler: not-used
19 | MemorySize: 3072
20 | CodeUri: ./
21 | CustomContainerConfig:
22 | Image: 'docker.io/allless/detect-object:v0.1'
23 | Command: '[ "gunicorn" ]'
24 | Args: '[ "-w", "1", "--threads", "10", "-b", "0.0.0.0:8080", "--keep-alive", "900", "server:app" ]'
25 | Events:
26 | http-trigger:
27 | Type: HTTP
28 | Properties:
29 | AuthType: ANONYMOUS
30 | Methods: ['GET', 'POST', 'PUT']
31 |
32 |
--------------------------------------------------------------------------------
/log-processing/Makefile:
--------------------------------------------------------------------------------
1 |
2 | PROJ_DIR = $(shell pwd)
3 | RUST_SHIM_DIR = ${PROJ_DIR}/runtime
4 |
5 | export
6 |
7 | rust-img:
8 | docker build -t fc-rust-env -f ${RUST_SHIM_DIR}/build.Dockerfile ${RUST_SHIM_DIR}
9 |
10 | build-anonymization: rust-img
11 | ${MAKE} -C ${PROJ_DIR}/anonymize-log/func build
12 |
13 | build-filter: rust-img
14 | ${MAKE} -C ${PROJ_DIR}/filter-log/func build
15 |
16 | deploy-anonymization: build-anonymization
17 | ${MAKE} -C ${PROJ_DIR}/anonymize-log/func deploy
18 |
19 | deploy-filter: build-filter
20 | ${MAKE} -C ${PROJ_DIR}/filter-log/func deploy
21 |
--------------------------------------------------------------------------------
/log-processing/README.md:
--------------------------------------------------------------------------------
1 |
2 | ## Log Processing Application
3 |
 4 | This application mimics a storage service provider that processes its logs to meet regulatory requirements.
 5 | - The [Anonymize Log](#anonymize-log-function) function anonymizes the sensitive personal information in the logs.
 6 | - The [Filter Log](#filter-log-function) function filters the logs and stores them in object storage.
7 |
8 | ### Prerequisites
9 |
10 | Both functions require a running [Kafka](https://kafka.apache.org/).
 11 | - After deploying Kafka, edit the `KAFKA_BROKER`, `KAFKA_READ_TOPIC` and `KAFKA_WRITE_TOPIC` entries in `{anonymize-log,filter-log}/func/template.yml`.
 12 | - Create three topics in Kafka: `original-logs`, `anonymized-logs` and `filtered-logs`.
 13 | - Then publish the logs in `data/sample-logs.log` to `original-logs`, for example with the commands sketched below.
14 |
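As a rough sketch (assuming a recent Apache Kafka installation whose command-line tools are on the `PATH`, and with `<broker>` standing for your bootstrap server address), the topics can be created and the sample logs published like this:

```
# create the three topics used by the two functions
kafka-topics.sh --create --topic original-logs   --partitions 1 --replication-factor 1 --bootstrap-server <broker>
kafka-topics.sh --create --topic anonymized-logs --partitions 1 --replication-factor 1 --bootstrap-server <broker>
kafka-topics.sh --create --topic filtered-logs   --partitions 1 --replication-factor 1 --bootstrap-server <broker>

# publish the sample logs, one line per message
kafka-console-producer.sh --topic original-logs --bootstrap-server <broker> < data/sample-logs.log
```
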
15 | ### Anonymize Log Function
16 |
 17 | To build and deploy the function,
 18 |
 19 | ```
 20 | cd faas-scheduling-benchmark/log-processing
21 | make deploy-anonymization
22 | ```
23 |
24 | To invoke the function,
25 |
26 | ```
 27 | # The <endpoint> can be found in the result printed out by `fun deploy`.
 28 | curl -s -X POST -i http://<endpoint>/2016-08-15/proxy/log-processing/anonymize-log/
29 | ```
30 |
31 | ### Filter Log Function
32 |
 33 | To build and deploy the function,
 34 |
 35 | ```
 36 | cd faas-scheduling-benchmark/log-processing
37 | make deploy-filter
38 | ```
39 |
40 | To invoke the function,
41 |
42 | ```
 43 | # The <endpoint> can be found in the result printed out by `fun deploy`.
 44 | curl -s -X POST -i http://<endpoint>/2016-08-15/proxy/log-processing/filter-log/
45 | ```
46 |
--------------------------------------------------------------------------------
/log-processing/anonymize-log/code/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "handler"
3 | version = "0.1.0"
4 | authors = ["Mark Sta Ana "]
5 |
6 | [dependencies]
7 | kafka = "0.8"
8 | custom_error = "1.6.0"
9 | regex = "1"
10 |
--------------------------------------------------------------------------------
/log-processing/anonymize-log/code/src/lib.rs:
--------------------------------------------------------------------------------
1 | extern crate kafka;
2 | extern crate custom_error;
3 | extern crate regex;
4 |
5 | use std::str;
6 | use std::env;
7 | use std::time::Duration;
8 |
9 |
10 | use custom_error::custom_error;
11 | use kafka::producer::{Producer, Record, RequiredAcks};
12 | use kafka::consumer::{Consumer, FetchOffset, GroupOffsetStorage};
13 | use kafka::error::Error as KafkaError;
14 | use regex::Regex;
15 |
16 |
17 | custom_error! { pub ProjError
18 | Decoding{ source: str::Utf8Error } = "error decoding utf-8",
19 | Kafka{ source: KafkaError } = "error interacting with Kafka",
20 | }
21 |
22 | pub fn handle(_req : String) -> String {
23 |
24 | return match consume() {
25 | Ok(count) => format!("Successfully anonymized {} messages.", count),
26 | Err(e) => format!("Error occurs. {}", e),
27 | };
28 | }
29 |
 30 | pub fn consume() -> Result<usize, ProjError> {
31 |
32 | // to_owned() convert a string literal into a String.
33 | let broker = match env::var("KAFKA_BROKER") {
34 | Ok(val) => val,
35 | Err(_) => "kafka-headless.kafka.svc.cluster.local:9092".to_owned(),
36 | };
37 | let read_topic = match env::var("KAFKA_READ_TOPIC") {
38 | Ok(val) => val,
39 | Err(_) => "original".to_owned(),
40 | };
41 | let write_topic = match env::var("KAFKA_WRITE_TOPIC") {
42 | Ok(val) => val,
43 | Err(_) => "anonymized".to_owned(),
44 | };
45 | let group = "anonymize".to_owned();
46 |
47 | let mut con = try!(Consumer::from_hosts(vec![broker.clone()])
48 | .with_topic(read_topic)
49 | .with_group(group)
50 | .with_fallback_offset(FetchOffset::Earliest)
51 | .with_offset_storage(GroupOffsetStorage::Kafka)
52 | .create());
53 | let mut producer = try!(Producer::from_hosts(vec!(broker.clone()))
54 | .with_ack_timeout(Duration::from_secs(1))
55 | .with_required_acks(RequiredAcks::One)
56 | .create());
57 |
 58 |     let mut msg: Vec<String> = Vec::new();
 59 |     let re = Regex::new(r"(?P<h>\d{1,3}.\d{1,3}.\d{1,3}.)(?P<t>\d{1,3})").unwrap();
60 |
61 | loop {
62 | let mss = try!(con.poll());
63 |
64 | if mss.is_empty() { return Ok(msg.len()); }
65 |
66 | for ms in mss.iter() {
67 | for m in ms.messages() {
68 | // println!("{}:{}@{}: {:?}", ms.topic(), ms.partition(), m.offset, m.value);
69 | let replaced = re.replace_all(str::from_utf8(m.value).unwrap(), "$h***");
70 | producer.send(&Record::from_value(write_topic.as_str(), replaced.as_bytes())).unwrap();
71 | msg.push(try!(str::from_utf8(m.value)).to_string());
72 | }
73 | let _ = con.consume_messageset(ms);
74 | }
75 | // try!(con.commit_consumed());
76 | }
77 | }
78 |
79 | #[cfg(test)]
80 | mod tests {
81 | #[test]
82 | fn test_regex() {
83 | use super::*;
 84 |         let re = Regex::new(r"(?P<h>\d{1,3}.\d{1,3}.\d{1,3}.)(?P<t>\d{1,3})").unwrap();
85 | let array: &[u8] = &[84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 108, 111, 103,
86 | 32, 108, 105, 110, 101, 32, 102, 114, 111, 109, 32, 49, 50,
87 | 51, 46, 52, 53, 54, 46, 55, 56, 57, 46, 49, 50, 51, 46];
88 | let replaced = re.replace_all(str::from_utf8(array).unwrap(), "$h***");
89 | assert_eq!("This is a log line from 123.456.789.***.", replaced);
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/log-processing/anonymize-log/func/Makefile:
--------------------------------------------------------------------------------
1 | build: PKG_DIR = ${PROJ_DIR}/anonymize-log/package
2 | build:
3 | docker run --rm -it \
4 | -v ${RUST_SHIM_DIR}/bootstrap:/opt/rust-shim \
5 | -v ${PROJ_DIR}/anonymize-log/code:/opt/function \
6 | fc-rust-env bash -c "cd /opt/rust-shim && cargo build"
7 | mkdir -p ${PKG_DIR}
8 | cp ${RUST_SHIM_DIR}/bootstrap/target/debug/bootstrap ${PKG_DIR}
9 |
10 | deploy:
11 | fun deploy -y
12 |
--------------------------------------------------------------------------------
/log-processing/anonymize-log/func/template.yml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | log-processing:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
7 | Description: 'Anonymize the logs read from Kafka.'
8 | Policies:
9 | - AliyunLogFullAccess
10 | InternetAccess: true
11 |
12 | anonymize-log:
13 | Type: 'Aliyun::Serverless::Function'
14 | Properties:
15 | Handler: handler.handle
16 | Runtime: custom
17 | MemorySize: 1024
18 | Timeout: 60
19 | CodeUri: './package'
20 | EnvironmentVariables:
21 | 'KAFKA_BROKER': ''
22 | 'KAFKA_READ_TOPIC': 'original-logs'
23 | 'KAFKA_WRITE_TOPIC': 'anonymized-logs'
24 | Events:
25 | http-trigger:
26 | Type: HTTP
27 | Properties:
28 | AuthType: ANONYMOUS
29 | Methods: ['GET', 'POST', 'PUT']
30 |
--------------------------------------------------------------------------------
/log-processing/filter-log/code/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "handler"
3 | version = "0.1.0"
4 | authors = ["Mark Sta Ana "]
5 |
6 | [dependencies]
7 | kafka = "0.8"
8 | custom_error = "1.6.0"
9 | regex = "1"
10 | redis = "*"
11 |
12 |
--------------------------------------------------------------------------------
/log-processing/filter-log/code/src/lib.rs:
--------------------------------------------------------------------------------
1 | extern crate kafka;
2 | extern crate custom_error;
3 | extern crate regex;
4 |
5 | use std::str;
6 | use std::env;
7 | use std::time::Duration;
8 |
9 | use custom_error::custom_error;
10 | use kafka::producer::{Producer, Record, RequiredAcks};
11 | use kafka::consumer::{Consumer, FetchOffset, GroupOffsetStorage};
12 | use kafka::error::Error as KafkaError;
13 | use regex::Regex;
14 |
15 |
16 | custom_error! { pub ProjError
17 | Decoding{ source: str::Utf8Error } = "error decoding utf-8",
18 | Kafka{ source: KafkaError } = "error interacting with Kafka",
19 | }
20 |
21 | pub fn handle(_req : String) -> String {
22 | return match consume() {
23 | Ok(count) => format!("Successfully filtered {} messages.", count),
24 | Err(e) => format!("Error occurs. {}", e),
25 | };
26 | }
27 |
 28 | pub fn consume() -> Result<usize, ProjError> {
29 |
30 | // to_owned() convert a string literal into a String.
31 | let broker = match env::var("KAFKA_BROKER") {
32 | Ok(val) => val,
33 | Err(_) => "kafka-headless.kafka.svc.cluster.local:9092".to_owned(),
34 | };
35 | let read_topic = match env::var("KAFKA_READ_TOPIC") {
36 | Ok(val) => val,
37 | Err(_) => "original".to_owned(),
38 | };
39 | let write_topic = match env::var("KAFKA_WRITE_TOPIC") {
40 | Ok(val) => val,
41 | Err(_) => "filtered".to_owned(),
42 | };
43 | let group = "filter".to_owned();
44 |
45 | let mut con = try!(Consumer::from_hosts(vec![broker.clone()])
46 | .with_topic(read_topic)
47 | .with_group(group)
48 | .with_fallback_offset(FetchOffset::Earliest)
49 | .with_offset_storage(GroupOffsetStorage::Kafka)
50 | .create());
51 | println!("after creating con");
52 | let mut producer = try!(Producer::from_hosts(vec!(broker.clone()))
53 | .with_ack_timeout(Duration::from_secs(1))
54 | .with_required_acks(RequiredAcks::One)
55 | .create());
56 |
 57 |     let mut msg: Vec<String> = Vec::new();
58 | let re = Regex::new(r".*input_userauth_request.*").unwrap();
59 |
60 | loop {
61 | let mss = try!(con.poll());
62 |
63 | if mss.is_empty() { return Ok(msg.len()); }
64 |
65 | for ms in mss.iter() {
66 | for m in ms.messages() {
67 | // println!("{}:{}@{}: {:?}", ms.topic(), ms.partition(), m.offset, m.value);
68 | if !re.is_match(str::from_utf8(m.value).unwrap()) {
69 | producer.send(&Record::from_value(write_topic.as_str(), m.value)).unwrap();
70 | }
71 | msg.push(try!(str::from_utf8(m.value)).to_string());
72 | }
73 | let _ = con.consume_messageset(ms);
74 | }
75 | // try!(con.commit_consumed());
76 | }
77 | }
78 |
79 |
--------------------------------------------------------------------------------
/log-processing/filter-log/func/Makefile:
--------------------------------------------------------------------------------
1 | build: PKG_DIR = ${PROJ_DIR}/filter-log/package
2 | build:
3 | docker run --rm -it \
4 | -v ${RUST_SHIM_DIR}/bootstrap:/opt/rust-shim \
5 | -v ${PROJ_DIR}/filter-log/code:/opt/function \
6 | fc-rust-env bash -c "cd /opt/rust-shim && cargo build"
7 | mkdir -p ${PKG_DIR}
8 | cp ${RUST_SHIM_DIR}/bootstrap/target/debug/bootstrap ${PKG_DIR}
9 |
10 | deploy:
11 | fun deploy -y
12 |
--------------------------------------------------------------------------------
/log-processing/filter-log/func/template.yml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | log-processing:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
 7 |       Description: 'Filter the logs read from Kafka.'
8 | Policies:
9 | - AliyunLogFullAccess
10 | InternetAccess: true
11 |
12 | filter-log:
13 | Type: 'Aliyun::Serverless::Function'
14 | Properties:
15 | Handler: handler.handle
16 | Runtime: custom
17 | MemorySize: 1024
18 | Timeout: 60
19 | CodeUri: './package'
20 | EnvironmentVariables:
21 | 'KAFKA_BROKER': ''
22 | 'KAFKA_READ_TOPIC': 'original-logs'
23 | 'KAFKA_WRITE_TOPIC': 'filtered-logs'
24 | Events:
25 | http-trigger:
26 | Type: HTTP
27 | Properties:
28 | AuthType: ANONYMOUS
29 | Methods: ['GET', 'POST', 'PUT']
30 |
--------------------------------------------------------------------------------
/log-processing/runtime/README.md:
--------------------------------------------------------------------------------
1 |
2 | ## Rust-Shim for Aliyun FC (Function Compute)
3 |
4 | A scaffold for porting Rust functions to FC.
5 |
6 |
7 | ### Code Structure
8 |
 9 | - The `bootstrap` package handles the FC protocol and relays the input and output of your function.
10 | - You have to wrap your function in a Rust package named `handler`, which exposes a function with the signature `fn handle(String) -> String` (UTF-8 encoded); a minimal sketch is shown after the layout below.
11 | - Your function package should be placed in a directory called `function`, side by side with `bootstrap`, for building.
12 |
13 | ```
14 | ├── function
15 | └── bootstrap
16 | ```
17 |
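As an illustration, a minimal `function` package could look like the sketch below (hypothetical content; the crate must be named `handler` in its `Cargo.toml` so that `bootstrap` can link against it via `handler = { path = "../function" }`):

```rust
// function/src/lib.rs -- minimal sketch of a handler crate.
// The bootstrap calls `handler::handle` with the raw request body and
// returns whatever string this function produces as the response body.
pub fn handle(req: String) -> String {
    // Replace this echo with your own logic.
    format!("received: {}", req)
}
```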
18 |
--------------------------------------------------------------------------------
/log-processing/runtime/bootstrap/.cargo/config.toml:
--------------------------------------------------------------------------------
1 | [source.crates-io]
2 | replace-with = 'tuna'
3 |
4 | [source.tuna]
5 | registry = "https://mirrors.tuna.tsinghua.edu.cn/git/crates.io-index.git"
6 |
7 | [source.rustcc]
8 | registry="git://crates.rustcc.com/crates.io-index"
9 |
--------------------------------------------------------------------------------
/log-processing/runtime/bootstrap/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "bootstrap"
3 | version = "0.1.0"
4 | edition = "2018"
5 |
6 |
7 | [dependencies]
8 | tokio = { version = "0.2", features = ["full"] }
9 | warp = "0.2"
10 | bytes = "0.5"
11 | handler = { path = "../function" }
12 |
13 | [source]
14 |
15 | [source.crates-io]
16 | replace-with = 'ustc'
17 |
18 | [source.ustc]
19 | registry = "git://mirrors.ustc.edu.cn/crates.io-index"
20 |
--------------------------------------------------------------------------------
/log-processing/runtime/bootstrap/src/main.rs:
--------------------------------------------------------------------------------
1 | use warp::Filter;
2 |
3 | extern crate handler;
4 |
5 |
6 | #[tokio::main]
7 | async fn main() {
8 | // POST /invoke
9 | let invoke = warp::path!("invoke")
10 | .and(warp::post())
11 | .and(warp::body::bytes())
12 | .map(|body: bytes::Bytes| {
13 | match String::from_utf8((&body[..]).to_vec()) {
14 | Err(error) => error.to_string(),
15 | Ok(s) => handler::handle(s)
16 | }
17 | });
18 |
 19 |     // POST /2016-08-15/proxy/<service>/<function>/
20 | let http_invoke = warp::path!("2016-08-15" / "proxy")
21 | .and(warp::post())
22 | .and(warp::path::param())
23 | .and(warp::path::param())
24 | .and(warp::body::bytes())
25 | .map(|service: String, function: String, body: bytes::Bytes| {
26 | match String::from_utf8((&body[..]).to_vec()) {
27 | Err(error) => error.to_string(),
28 | Ok(s) => handler::handle(s)
29 | }
30 | });
31 |
32 | // POST /*
33 | let wildcard = warp::post()
34 | .and(warp::path::tail())
35 | .and(warp::body::bytes())
36 | .map(|tail: warp::filters::path::Tail, body: bytes::Bytes| {
37 | match String::from_utf8((&body[..]).to_vec()) {
38 | Err(error) => error.to_string(),
39 | Ok(s) => handler::handle(s)
40 | }
41 | });
42 |
43 | let routes = invoke
44 | .or(http_invoke)
45 | .or(wildcard);
46 | println!("listening on 0.0.0.0:9000");
47 | warp::serve(routes)
48 | .run(([0, 0, 0, 0], 9000))
49 | .await;
50 | }
51 |
--------------------------------------------------------------------------------
/log-processing/runtime/build.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM aliyunfc/runtime-custom:build
2 |
3 | ENV RUSTUP_HOME=/usr/local/rustup \
4 | CARGO_HOME=/usr/local/cargo \
5 | PATH=/usr/local/cargo/bin:$PATH \
6 | RUST_VERSION=1.45.2
7 |
8 | RUN set -eux; \
9 | echo 'deb http://mirrors.aliyun.com/debian/ stretch main non-free contrib' > /etc/apt/sources.list ; \
10 | echo 'deb-src http://mirrors.aliyun.com/debian/ stretch main non-free contrib' >> /etc/apt/sources.list ; \
11 | echo 'deb http://mirrors.aliyun.com/debian-security stretch/updates main' >> /etc/apt/sources.list ; \
12 | echo 'deb-src http://mirrors.aliyun.com/debian-security stretch/updates main' >> /etc/apt/sources.list ; \
13 | echo 'deb http://mirrors.aliyun.com/debian/ stretch-updates main non-free contrib' >> /etc/apt/sources.list ; \
14 | echo 'deb-src http://mirrors.aliyun.com/debian/ stretch-updates main non-free contrib' >> /etc/apt/sources.list ; \
15 | echo 'deb http://mirrors.aliyun.com/debian/ stretch-backports main non-free contrib' >> /etc/apt/sources.list ; \
16 | echo 'deb-src http://mirrors.aliyun.com/debian/ stretch-backports main non-free contrib' >> /etc/apt/sources.list ; \
17 | apt-get update; \
18 | apt-get install -y --no-install-recommends \
19 | ca-certificates \
20 | gcc \
21 | libc6-dev \
22 | wget \
23 | libssl-dev \
24 | pkg-config \
25 | ; \
26 | curl https://cdn.jsdelivr.net/gh/rust-lang-nursery/rustup.rs/rustup-init.sh > rustup-init ; \
27 | chmod +x rustup-init ; \
28 | RUSTUP_UPDATE_ROOT=https://mirrors.ustc.edu.cn/rust-static/rustup ./rustup-init -y --no-modify-path --profile minimal --default-toolchain $RUST_VERSION; \
29 | rm rustup-init; \
30 | chmod -R a+w $RUSTUP_HOME $CARGO_HOME; \
31 | rustup --version; \
32 | cargo --version; \
33 | rustc --version; \
34 | apt-get remove -y --auto-remove \
35 | wget \
36 | ; \
37 | rm -rf /var/lib/apt/lists/*;
38 |
39 |
--------------------------------------------------------------------------------
/media-processing/README.md:
--------------------------------------------------------------------------------
1 |
2 | ## Media Processing Application
3 |
4 | This application processes the media content uploaded by the users.
5 | - The [Convert Audio](#convert-audio-function) function converts the format of a media file.
6 | - The [Get Media Meta](#get-media-meta-function) function extracts the meta information from a media file.
7 |
8 | ### Prerequisite
9 |
 10 | Both functions access [OSS storage](https://www.alibabacloud.com/product/object-storage-service) (Alibaba Cloud's equivalent of S3). You have to create an OSS bucket and upload some videos to it. (`media-processing/data/video.mp4` is an example.)
11 |
12 |
13 | ### Convert Audio Function
14 |
15 | To deploy the function,
16 |
17 | ```
18 | cd faas-scheduling-benchmark/media-processing
19 | fun deploy -y convert-audio
20 | ```
21 |
22 | To invoke the function,
23 |
24 | ```
 25 | # Replace all bracketed placeholders "<...>" with values of your choice.
 26 | # - The <endpoint> can be found in the result printed out by `fun deploy`.
 27 | # - The "object_url" is where you have uploaded the video file.
 28 | curl -s -i http://<endpoint>/2016-08-15/proxy/media-processing/convert-audio/ \
 29 |   -X POST \
 30 |   -d "{ \"object_url\": \"https://<bucket>.oss-<region>.aliyuncs.com/<object-key>\",
 31 |        \"output_bucket\": \"<output-bucket>\",
 32 |        \"output_prefix\": \"<output-prefix>\",
33 | \"dst_type\": \"wav\",
34 | \"params\": \"-ac 1 -ar 4000\"
35 | }"
36 | ```
37 |
 38 | The function converts an audio file into the WAV format and writes it to `<output-bucket>/<output-prefix>/<name>.wav`.
39 |
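If you want to double-check the output object, one option is the `ossutil` command-line tool; a rough sketch, assuming ossutil is installed and configured with your credentials:

```
ossutil ls oss://<output-bucket>/<output-prefix>/
```
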
40 | ### Get Media Meta Function
41 |
42 | To deploy the function,
43 |
44 | ```
45 | cd faas-scheduling-benchmark/media-processing
46 | fun deploy -y get-media-meta
47 | ```
48 |
49 | To invoke the function,
50 |
51 | ```
 52 | curl -s -i http://<endpoint>/2016-08-15/proxy/media-processing/get-media-meta/ \
 53 |   -X POST \
 54 |   -d "{ \"object_url\": \"https://<bucket>.oss-<region>.aliyuncs.com/<object-key>\" }"
 55 | ```
--------------------------------------------------------------------------------
/media-processing/convert-audio/ffmpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/All-less/faas-scheduling-benchmark/84ca34c6fd85ab10b497abfb584a4c7cb38801b5/media-processing/convert-audio/ffmpeg
--------------------------------------------------------------------------------
/media-processing/convert-audio/index.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import subprocess
3 | import oss2
4 | import logging
5 | import json
6 | import os
7 | import time
8 |
9 | logging.getLogger("oss2.api").setLevel(logging.ERROR)
10 | logging.getLogger("oss2.auth").setLevel(logging.ERROR)
11 |
12 | LOGGER = logging.getLogger()
13 |
14 | '''
15 | 1. function and bucket locate in same region
16 | 2. service's role has OSSFullAccess
17 | 3. event format
18 | {
19 | "object_url": "http://xxxx.xxx/xxx.mp3",
20 | "output_bucket": "test",
21 | "output_prefix": "",
22 | "dst_type": "wav",
23 | "params": "-ac 1 -ar 4000"
24 |
25 | "bucket_name" : "test-bucket",
26 | "object_key" : "a.mp3",
27 | "output_dir" : "output/",
28 | "dst_type": ".wav",
29 | "ac": 1,
30 | "ar": 4000
31 | }
32 | '''
33 |
34 | def handler(env, start_resp):
35 | context = env['fc.context']
36 | size = int(env.get('CONTENT_LENGTH', 0))
37 | body = json.loads(env['wsgi.input'].read(size))
38 |
39 | object_url = body['object_url']
40 | name = object_url.split('/')[-1].split('.')[0]
41 | dst_type = body['dst_type']
42 | path = f'/tmp/{name}.{dst_type}'
43 |
44 | cmd = ['/code/ffmpeg', '-i', object_url] + \
45 | body['params'].split(' ') + [path]
46 | try:
47 | subprocess.run(
48 | cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
49 | except subprocess.CalledProcessError as e:
50 | start_resp('200 OK', [('Content-type', 'text/plain')])
51 | return [ json.dumps({
52 | 'code': e.returncode,
53 | 'cmd': e.cmd,
54 | 'stdout': e.stdout.decode('utf-8') if e.stdout is not None else '',
55 | 'stderr': e.stderr.decode('utf-8') if e.stderr is not None else ''
56 | }).encode('utf-8') ]
57 |
 58 |     creds = context.credentials
 59 |     auth = oss2.StsAuth(creds.accessKeyId,
 60 |                         creds.accessKeySecret, creds.securityToken)
 61 |     oss_client = oss2.Bucket(auth,
 62 |         'oss-%s-internal.aliyuncs.com' % context.region, body['output_bucket'])
 63 |
69 |
70 | key = os.path.join(body['output_prefix'], f'{name}.{dst_type}')
71 | oss_client.put_object_from_file(key, path)
72 | os.remove(path)
73 |
74 | start_resp('200 OK', [('Content-type', 'text/plain')])
75 | return [ 'ok'.encode('utf-8') ]
76 |
--------------------------------------------------------------------------------
/media-processing/data/video.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/All-less/faas-scheduling-benchmark/84ca34c6fd85ab10b497abfb584a4c7cb38801b5/media-processing/data/video.mp4
--------------------------------------------------------------------------------
/media-processing/get-media-meta/ffprobe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/All-less/faas-scheduling-benchmark/84ca34c6fd85ab10b497abfb584a4c7cb38801b5/media-processing/get-media-meta/ffprobe
--------------------------------------------------------------------------------
/media-processing/get-media-meta/index.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import subprocess
3 | import oss2
4 | import logging
5 | import json
6 | import os
7 | import time
8 |
9 | logging.getLogger("oss2.api").setLevel(logging.ERROR)
10 | logging.getLogger("oss2.auth").setLevel(logging.ERROR)
11 |
12 | LOGGER = logging.getLogger()
13 |
14 | '''
15 | 1. function and bucket locate in same region
16 | 2. service's role has OSSReadAccess
17 | 3. event format
18 | {
19 | "object_url": "http://xxxx.xxx/xxx.mp4"
20 | }
21 | '''
22 |
 23 | # a decorator that prints the execution time of a function
24 | def print_excute_time(func):
25 | def wrapper(*args, **kwargs):
26 | local_time = time.time()
27 | ret = func(*args, **kwargs)
 28 |         LOGGER.info('current function [%s] execution time is %.2f seconds' %
29 | (func.__name__, time.time() - local_time))
30 | return ret
31 | return wrapper
32 |
33 |
34 | @print_excute_time
35 | def handler(env, start_resp):
36 | size = int(env.get('CONTENT_LENGTH', 0))
37 | evt = json.loads(env['wsgi.input'].read(size))
38 | object_url = evt["object_url"]
39 |
40 | cmd = ["/code/ffprobe", "-v", "quiet", "-show_format", "-show_streams",
41 | "-print_format", "json", "-i", object_url]
42 | try:
43 | raw_result = subprocess.check_output(cmd)
44 | start_resp('200 OK', [('Content-type', 'text/plain')])
45 | return [ raw_result ]
46 | except subprocess.CalledProcessError as e:
47 | start_resp('200 OK', [('Content-type', 'text/plain')])
48 | return [ json.dumps({
49 | 'code': e.returncode,
50 | 'cmd': e.cmd,
51 | 'stdout': e.stdout.decode('utf-8') if e.stdout is not None else '',
52 | 'stderr': e.stderr.decode('utf-8') if e.stderr is not None else ''
53 | }).encode('utf-8') ]
54 |
--------------------------------------------------------------------------------
/media-processing/template.yml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | media-processing:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
7 | Description: Scenarios that can be solved by OSS + FC
8 | Policies:
9 | - AliyunOSSFullAccess
10 | - AliyunFCFullAccess
11 | InternetAccess: true
12 |
13 | get-media-meta:
14 | Type: 'Aliyun::Serverless::Function'
15 | Properties:
16 | Handler: index.handler
17 | Runtime: python3
18 | Timeout: 600
19 | MemorySize: 256
20 | CodeUri: ./get-media-meta
21 | Events:
22 | http-trigger:
23 | Type: HTTP
24 | Properties:
25 | AuthType: ANONYMOUS
26 | Methods: ['GET', 'POST', 'PUT']
27 |
28 | convert-audio:
29 | Type: 'Aliyun::Serverless::Function'
30 | Properties:
31 | Handler: index.handler
32 | Runtime: python3
33 | Timeout: 600
34 | MemorySize: 256
35 | CodeUri: ./convert-audio
36 | Events:
37 | http-trigger:
38 | Type: HTTP
39 | Properties:
40 | AuthType: ANONYMOUS
41 | Methods: ['GET', 'POST', 'PUT']
42 |
--------------------------------------------------------------------------------
/smart-manufacturing/README.md:
--------------------------------------------------------------------------------
1 |
2 | ## Smart Manufacturing Application
3 |
 4 | This application mimics a factory that processes its sensor data and displays anomalies on monitors.
 5 | - The [Ingest Data](#ingest-data-function) function ingests the data generated by the sensor network.
 6 | - The [Detect Anomaly](#detect-anomaly-function) function detects anomalous patterns in the sensor data.
7 |
8 | ### Prerequisite
9 |
 10 | 1. Both functions are deployed as [custom container](https://www.alibabacloud.com/help/en/function-compute/latest/create-a-function) functions, which means we do not use a default language runtime provided by the FaaS platform but instead build our own runtime container. To build them, we have to prepare two auxiliary images.
11 |
12 | ```
13 | # this image creates a build environment for CPP
14 | docker build -f build.Dockerfile -t localhost/smart-manufacturing/build-env:latest .
15 |
16 | # this image is the base for the custom container to run a CPP binary
17 | docker build -f runtime.Dockerfile -t localhost/smart-manufacturing/runtime-env:latest .
18 | ```
19 |
 20 | 2. Both functions depend on an external [MySQL database](https://www.mysql.com/). After deploying it, edit the `MYSQL_ADDRESS`, `MYSQL_USERNAME` and `MYSQL_PASSWORD` entries in both `{detect-anomaly,ingest-data}/template.yaml`. Then execute the statements in `create_tb.sql` to create the required tables (see the sketch below).
21 |
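For example, a minimal sketch using the `mysql` command-line client (assuming the `db` database referenced in `create_tb.sql`, with placeholders matching the values you put into the templates):

```
# create the database if it does not exist yet, then create the tables
mysql -h <mysql-host> -u <mysql-username> -p -e "CREATE DATABASE IF NOT EXISTS db"
mysql -h <mysql-host> -u <mysql-username> -p db < create_tb.sql
```
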
22 | ### Ingest Data Function
23 |
24 | To deploy the function,
25 |
26 | ```
27 | cd faas-scheduling-benchmark/smart-manufacturing/ingest-data
 28 | docker build -t <registry>/<image-name>:<tag> .
29 |
30 | # update `ingest-data.Properties.CustomContainerConfig.Image` entry with the image name that you have just chosen
31 | vim template.yaml
32 |
33 | fun deploy -y
34 | ```
35 |
36 | To invoke the function,
37 |
38 | ```
 39 | # The <endpoint> can be found in the result printed out by `fun deploy`.
 40 | curl -s -i http://<endpoint>/2016-08-15/proxy/smart-manufacturing/ingest-data/ \
41 | -X POST \
42 | -d "{\"time_arrive\":\"19/07/2008 11:55:00\",\"pass\":-1,\"data\":\"3030.93 2564 2187.7333 1411.1265 1.3602 100 97.6133 0.1242 1.5005 0.01 62 -0.0034 0.9455 202.4396 0 7.9558 414.871 10.0433 0.968 192.3963 12.519 1.4026 -5419 2916.5 -4043.75 751 0.8955 1.773 3.049 64.2333 2.0222 0.1632 3.5191 83.3971 9.5126 50.617 64.2588 49.383 66.3141 86.9555 117.5132 61.29 4.515 70 352.7173 10.1841 130.3691 723.3092 1.3072 141.2282 1 624.3145 218.3174 0 4.592 4.841 2834 0.9317 0.9484 4.7057 -1.7264 350.9264 10.6231 108.6427 16.1445 21.7264 29.5367 693.7724 0.9226 148.6009 1 608.17 84.0793 NaN NaN 0 0.0126 -0.0206 0.0141 -0.0307 -0.0083 -0.0026 -0.0567 -0.0044 7.2163 0.132 NaN 2.3895 0.969 1747.6049 0.1841 8671.9301 -0.3 274 -0.0055 -0.0001 0.0001 0.0003 -0.2786 0 0.3974 -0.0251 0.0002 0.0002 0.135 -0.0042 0.0003 0.0056 0 -0.2468 0.3196 NaN NaN NaN NaN 0.946 0 748.6115 0.9908 58.4306 0.6002 0.9804 6.3788 15.88 2.639 15.94 15.93 0.8656 3.353 0.4098 3.188 -0.0473 0.7243 0.996 2.2967 1000.7263 39.2373 123 111.3 75.2 46.2 350.671 0.3948 0 6.78 0.0034 0.0898 0.085 0.0358 0.0328 12.2566 0 4.271 10.284 0. 4734 0.0167 11.8901 0.41 0.0506 NaN NaN 1017 967 1066 368 0.09 0.048 0.095 2 0.9 0.069 0.046 0.725 0.1139 0.3183 0.5888 0.3184 0.9499 0.3979 0.16 0 0 20.95 0.333 12.49 16.713 0.0803 5.72 0 11. 19 65.363 0 0 0 0 0 0 0.292 5.38 20.1 0.296 10.62 10.3 5.38 4.04 16.23 0.2951 8.64 0 10.3 97.314 0 0.0772 0.0599 0.07 0.0547 0.0704 0.052 0.0301 0.1135 3.4789 0.001 NaN 0.0707 0.0211 175.2173 0.0315 1940.3994 0 0.0744 0.0546 0 0 0 0 0 0 0 0 0 0.0027 0.004 0 0 0 0 NaN NaN NaN NaN 0.0188 0 219.9453 0.0011 2.8374 0.0189 0.005 0.4269 0 0 0 0 0 0 0 0 0 0 0 0.0472 40.855 4.5152 30.9815 3 3.9606 22.9057 15.9525 110.2144 0.131 0 2.5883 0.001 0.0319 0.0197 0.012 0.0109 3.9321 0 1.5123 3.5811 0.1337 0.0055 3.8447 0.1077 0.0167 NaN NaN 418.1363 398.3185 496.1582 158.333 0.0373 0.02 02 0.0462 0.6083 0.3032 0.02 0.0174 0.2827 0.0434 0.1342 0.2419 0.1343 0.367 0.1431 0.061 0 0 0 6.2698 0.1181 3.8208 5.3737 0.0254 1.6252 0 3.2461 18.0118 0 0 0 0 0 0 0.0752 1.5989 6.5893 0.09 13 3.0911 8.4654 1.5989 1.2293 5.3406 0.0867 2.8551 0 2.9971 31.8843 NaN NaN 0 0.0215 0.0274 0.0315 0.0238 0.0206 0.0238 0.0144 0.0491 1.2708 0.0004 NaN 0.0229 0.0065 55.2039 0.0105 560.2658 0 0.017 0.0148 0.0124 0.0114 0 0 0 0 0 0 0 0.001 0.0013 0 0 0 0 NaN NaN NaN NaN 0.0055 0 61.5932 0.0003 0.9967 0.0082 0.0017 0.1437 0 0 0 0 0 0 0 0 0 0 0 0.0151 14.2396 1.4392 5.6188 3.6721 2.9 329 2.1118 24.8504 29.0271 0 6.9458 2.738 5.9846 525.0965 0 3.4641 6.0544 0 53.684 2.4788 4.7141 1.7275 6.18 3.275 3.6084 18.7673 33.1562 26.3617 49.0013 10.0503 2.7073 3.1158 3.1136 44.5055 4 2.2737 1.3071 0.8693 1.1975 0.6288 0.9163 0.6448 1.4324 0.4576 0.1362 0 0 0 5.9396 3.2698 9.5805 2.3106 6.1463 4.0502 0 1.7924 29.9394 0 0 0 0 0 0 6.2052 311.6377 5.7277 2.7864 9.7752 63.7987 24.7625 13.6778 2.3394 31.9893 5.8142 0 1.6936 115.7408 0 613.3069 291.4842 494.6996 178.1759 843.1138 0 53.1098 0 48.2091 0.7578 NaN 2.957 2.1739 10.0261 17.1202 22.3756 0 0 0 0 0 0 0 0 0 0 0 0 64.6707 0 0 0 0 0 NaN NaN NaN NaN 1.9864 0 29.3804 0.1094 4.856 3.1406 0.5064 6.6926 0 0 0 0 0 0 0 0 0 0 0 2.057 4.0825 11.5074 0.1096 0.0078 0.0026 7.116 1.0616 395.57 75.752 0.4234 12.93 0.78 0.1827 5.7349 0.3363 39.8842 3.2687 1.0297 1.0344 0.4385 0.1039 42.3877 NaN NaN NaN NaN NaN NaN NaN NaN 533.85 2.1113 8.95 0.3157 3.0624 0.1026 1.6765 14.9509 NaN NaN NaN NaN 0.5005 0.011 8 0.0035 2.363 NaN NaN NaN NaN\"}"
43 | ```
44 |
45 | ### Detect Anomaly Function
46 |
47 | To deploy the function,
48 |
49 | ```
50 | cd faas-scheduling-benchmark/smart-manufacturing/detect-anomaly
 51 | docker build -t <registry>/<image-name>:<tag> .
52 |
53 | # update `detect-anomaly.Properties.CustomContainerConfig.Image` entry with the image name that you have just chosen
54 | vim template.yaml
55 |
56 | fun deploy -y
57 | ```
58 |
59 | To invoke the function,
60 |
61 | ```
 62 | # The <endpoint> can be found in the result printed out by `fun deploy`.
 63 | curl -s -i http://<endpoint>/2016-08-15/proxy/smart-manufacturing/detect-anomaly/ \
64 | -X POST \
65 | -d "{\"time_arrive\":\"19/07/2008 11:55:00\",\"pass\":-1,\"data\":\"3030.93 2564 2187.7333 1411.1265 1.3602 100 97.6133 0.1242 1.5005 0.01 62 -0.0034 0.9455 202.4396 0 7.9558 414.871 10.0433 0.968 192.3963 12.519 1.4026 -5419 2916.5 -4043.75 751 0.8955 1.773 3.049 64.2333 2.0222 0.1632 3.5191 83.3971 9.5126 50.617 64.2588 49.383 66.3141 86.9555 117.5132 61.29 4.515 70 352.7173 10.1841 130.3691 723.3092 1.3072 141.2282 1 624.3145 218.3174 0 4.592 4.841 2834 0.9317 0.9484 4.7057 -1.7264 350.9264 10.6231 108.6427 16.1445 21.7264 29.5367 693.7724 0.9226 148.6009 1 608.17 84.0793 NaN NaN 0 0.0126 -0.0206 0.0141 -0.0307 -0.0083 -0.0026 -0.0567 -0.0044 7.2163 0.132 NaN 2.3895 0.969 1747.6049 0.1841 8671.9301 -0.3 274 -0.0055 -0.0001 0.0001 0.0003 -0.2786 0 0.3974 -0.0251 0.0002 0.0002 0.135 -0.0042 0.0003 0.0056 0 -0.2468 0.3196 NaN NaN NaN NaN 0.946 0 748.6115 0.9908 58.4306 0.6002 0.9804 6.3788 15.88 2.639 15.94 15.93 0.8656 3.353 0.4098 3.188 -0.0473 0.7243 0.996 2.2967 1000.7263 39.2373 123 111.3 75.2 46.2 350.671 0.3948 0 6.78 0.0034 0.0898 0.085 0.0358 0.0328 12.2566 0 4.271 10.284 0. 4734 0.0167 11.8901 0.41 0.0506 NaN NaN 1017 967 1066 368 0.09 0.048 0.095 2 0.9 0.069 0.046 0.725 0.1139 0.3183 0.5888 0.3184 0.9499 0.3979 0.16 0 0 20.95 0.333 12.49 16.713 0.0803 5.72 0 11. 19 65.363 0 0 0 0 0 0 0.292 5.38 20.1 0.296 10.62 10.3 5.38 4.04 16.23 0.2951 8.64 0 10.3 97.314 0 0.0772 0.0599 0.07 0.0547 0.0704 0.052 0.0301 0.1135 3.4789 0.001 NaN 0.0707 0.0211 175.2173 0.0315 1940.3994 0 0.0744 0.0546 0 0 0 0 0 0 0 0 0 0.0027 0.004 0 0 0 0 NaN NaN NaN NaN 0.0188 0 219.9453 0.0011 2.8374 0.0189 0.005 0.4269 0 0 0 0 0 0 0 0 0 0 0 0.0472 40.855 4.5152 30.9815 3 3.9606 22.9057 15.9525 110.2144 0.131 0 2.5883 0.001 0.0319 0.0197 0.012 0.0109 3.9321 0 1.5123 3.5811 0.1337 0.0055 3.8447 0.1077 0.0167 NaN NaN 418.1363 398.3185 496.1582 158.333 0.0373 0.02 02 0.0462 0.6083 0.3032 0.02 0.0174 0.2827 0.0434 0.1342 0.2419 0.1343 0.367 0.1431 0.061 0 0 0 6.2698 0.1181 3.8208 5.3737 0.0254 1.6252 0 3.2461 18.0118 0 0 0 0 0 0 0.0752 1.5989 6.5893 0.09 13 3.0911 8.4654 1.5989 1.2293 5.3406 0.0867 2.8551 0 2.9971 31.8843 NaN NaN 0 0.0215 0.0274 0.0315 0.0238 0.0206 0.0238 0.0144 0.0491 1.2708 0.0004 NaN 0.0229 0.0065 55.2039 0.0105 560.2658 0 0.017 0.0148 0.0124 0.0114 0 0 0 0 0 0 0 0.001 0.0013 0 0 0 0 NaN NaN NaN NaN 0.0055 0 61.5932 0.0003 0.9967 0.0082 0.0017 0.1437 0 0 0 0 0 0 0 0 0 0 0 0.0151 14.2396 1.4392 5.6188 3.6721 2.9 329 2.1118 24.8504 29.0271 0 6.9458 2.738 5.9846 525.0965 0 3.4641 6.0544 0 53.684 2.4788 4.7141 1.7275 6.18 3.275 3.6084 18.7673 33.1562 26.3617 49.0013 10.0503 2.7073 3.1158 3.1136 44.5055 4 2.2737 1.3071 0.8693 1.1975 0.6288 0.9163 0.6448 1.4324 0.4576 0.1362 0 0 0 5.9396 3.2698 9.5805 2.3106 6.1463 4.0502 0 1.7924 29.9394 0 0 0 0 0 0 6.2052 311.6377 5.7277 2.7864 9.7752 63.7987 24.7625 13.6778 2.3394 31.9893 5.8142 0 1.6936 115.7408 0 613.3069 291.4842 494.6996 178.1759 843.1138 0 53.1098 0 48.2091 0.7578 NaN 2.957 2.1739 10.0261 17.1202 22.3756 0 0 0 0 0 0 0 0 0 0 0 0 64.6707 0 0 0 0 0 NaN NaN NaN NaN 1.9864 0 29.3804 0.1094 4.856 3.1406 0.5064 6.6926 0 0 0 0 0 0 0 0 0 0 0 2.057 4.0825 11.5074 0.1096 0.0078 0.0026 7.116 1.0616 395.57 75.752 0.4234 12.93 0.78 0.1827 5.7349 0.3363 39.8842 3.2687 1.0297 1.0344 0.4385 0.1039 42.3877 NaN NaN NaN NaN NaN NaN NaN NaN 533.85 2.1113 8.95 0.3157 3.0624 0.1026 1.6765 14.9509 NaN NaN NaN NaN 0.5005 0.011 8 0.0035 2.363 NaN NaN NaN NaN\"}"
66 | ```
67 |
--------------------------------------------------------------------------------
/smart-manufacturing/build.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:bionic
2 |
3 | WORKDIR /opt
4 |
5 | RUN apt-get update -y && \
6 | apt-get install -yq git build-essential tar curl zip unzip cmake autoconf libtool pkg-config libmysqlcppconn-dev libboost-all-dev && \
7 | git clone https://github.com/microsoft/vcpkg && \
8 | ./vcpkg/bootstrap-vcpkg.sh && \
9 | ./vcpkg/vcpkg install http-parser fmt restinio
10 |
11 |
--------------------------------------------------------------------------------
/smart-manufacturing/create_tb.sql:
--------------------------------------------------------------------------------
1 | USE db;
2 |
3 | -- remove newlines when pasting to MySQL shell
4 | CREATE TABLE IF NOT EXISTS sensor_data(batch_id INT AUTO_INCREMENT PRIMARY KEY, pass INT, arrived_at DATETIME,
5 | s0 FLOAT, s1 FLOAT, s2 FLOAT, s3 FLOAT, s4 FLOAT, s5 FLOAT, s6 FLOAT, s7 FLOAT, s8 FLOAT, s9 FLOAT, s10 FLOAT,
6 | s11 FLOAT, s12 FLOAT, s13 FLOAT, s14 FLOAT, s15 FLOAT, s16 FLOAT, s17 FLOAT, s18 FLOAT, s19 FLOAT, s20 FLOAT,
7 | s21 FLOAT, s22 FLOAT, s23 FLOAT, s24 FLOAT, s25 FLOAT, s26 FLOAT, s27 FLOAT, s28 FLOAT, s29 FLOAT, s30 FLOAT,
8 | s31 FLOAT, s32 FLOAT, s33 FLOAT, s34 FLOAT, s35 FLOAT, s36 FLOAT, s37 FLOAT, s38 FLOAT, s39 FLOAT, s40 FLOAT,
9 | s41 FLOAT, s42 FLOAT, s43 FLOAT, s44 FLOAT, s45 FLOAT, s46 FLOAT, s47 FLOAT, s48 FLOAT, s49 FLOAT, s50 FLOAT,
10 | s51 FLOAT, s52 FLOAT, s53 FLOAT, s54 FLOAT, s55 FLOAT, s56 FLOAT, s57 FLOAT, s58 FLOAT, s59 FLOAT, s60 FLOAT,
11 | s61 FLOAT, s62 FLOAT, s63 FLOAT, s64 FLOAT, s65 FLOAT, s66 FLOAT, s67 FLOAT, s68 FLOAT, s69 FLOAT, s70 FLOAT,
12 | s71 FLOAT, s72 FLOAT, s73 FLOAT, s74 FLOAT, s75 FLOAT, s76 FLOAT, s77 FLOAT, s78 FLOAT, s79 FLOAT, s80 FLOAT,
13 | s81 FLOAT, s82 FLOAT, s83 FLOAT, s84 FLOAT, s85 FLOAT, s86 FLOAT, s87 FLOAT, s88 FLOAT, s89 FLOAT, s90 FLOAT,
14 | s91 FLOAT, s92 FLOAT, s93 FLOAT, s94 FLOAT, s95 FLOAT, s96 FLOAT, s97 FLOAT, s98 FLOAT, s99 FLOAT, s100 FLOAT,
15 | s101 FLOAT, s102 FLOAT, s103 FLOAT, s104 FLOAT, s105 FLOAT, s106 FLOAT, s107 FLOAT, s108 FLOAT, s109 FLOAT, s110 FLOAT,
16 | s111 FLOAT, s112 FLOAT, s113 FLOAT, s114 FLOAT, s115 FLOAT, s116 FLOAT, s117 FLOAT, s118 FLOAT, s119 FLOAT, s120 FLOAT,
17 | s121 FLOAT, s122 FLOAT, s123 FLOAT, s124 FLOAT, s125 FLOAT, s126 FLOAT, s127 FLOAT, s128 FLOAT, s129 FLOAT, s130 FLOAT,
18 | s131 FLOAT, s132 FLOAT, s133 FLOAT, s134 FLOAT, s135 FLOAT, s136 FLOAT, s137 FLOAT, s138 FLOAT, s139 FLOAT, s140 FLOAT,
19 | s141 FLOAT, s142 FLOAT, s143 FLOAT, s144 FLOAT, s145 FLOAT, s146 FLOAT, s147 FLOAT, s148 FLOAT, s149 FLOAT, s150 FLOAT,
20 | s151 FLOAT, s152 FLOAT, s153 FLOAT, s154 FLOAT, s155 FLOAT, s156 FLOAT, s157 FLOAT, s158 FLOAT, s159 FLOAT, s160 FLOAT,
21 | s161 FLOAT, s162 FLOAT, s163 FLOAT, s164 FLOAT, s165 FLOAT, s166 FLOAT, s167 FLOAT, s168 FLOAT, s169 FLOAT, s170 FLOAT,
22 | s171 FLOAT, s172 FLOAT, s173 FLOAT, s174 FLOAT, s175 FLOAT, s176 FLOAT, s177 FLOAT, s178 FLOAT, s179 FLOAT, s180 FLOAT,
23 | s181 FLOAT, s182 FLOAT, s183 FLOAT, s184 FLOAT, s185 FLOAT, s186 FLOAT, s187 FLOAT, s188 FLOAT, s189 FLOAT, s190 FLOAT,
24 | s191 FLOAT, s192 FLOAT, s193 FLOAT, s194 FLOAT, s195 FLOAT, s196 FLOAT, s197 FLOAT, s198 FLOAT, s199 FLOAT, s200 FLOAT,
25 | s201 FLOAT, s202 FLOAT, s203 FLOAT, s204 FLOAT, s205 FLOAT, s206 FLOAT, s207 FLOAT, s208 FLOAT, s209 FLOAT, s210 FLOAT,
26 | s211 FLOAT, s212 FLOAT, s213 FLOAT, s214 FLOAT, s215 FLOAT, s216 FLOAT, s217 FLOAT, s218 FLOAT, s219 FLOAT, s220 FLOAT,
27 | s221 FLOAT, s222 FLOAT, s223 FLOAT, s224 FLOAT, s225 FLOAT, s226 FLOAT, s227 FLOAT, s228 FLOAT, s229 FLOAT, s230 FLOAT,
28 | s231 FLOAT, s232 FLOAT, s233 FLOAT, s234 FLOAT, s235 FLOAT, s236 FLOAT, s237 FLOAT, s238 FLOAT, s239 FLOAT, s240 FLOAT,
29 | s241 FLOAT, s242 FLOAT, s243 FLOAT, s244 FLOAT, s245 FLOAT, s246 FLOAT, s247 FLOAT, s248 FLOAT, s249 FLOAT, s250 FLOAT,
30 | s251 FLOAT, s252 FLOAT, s253 FLOAT, s254 FLOAT, s255 FLOAT, s256 FLOAT, s257 FLOAT, s258 FLOAT, s259 FLOAT, s260 FLOAT,
31 | s261 FLOAT, s262 FLOAT, s263 FLOAT, s264 FLOAT, s265 FLOAT, s266 FLOAT, s267 FLOAT, s268 FLOAT, s269 FLOAT, s270 FLOAT,
32 | s271 FLOAT, s272 FLOAT, s273 FLOAT, s274 FLOAT, s275 FLOAT, s276 FLOAT, s277 FLOAT, s278 FLOAT, s279 FLOAT, s280 FLOAT,
33 | s281 FLOAT, s282 FLOAT, s283 FLOAT, s284 FLOAT, s285 FLOAT, s286 FLOAT, s287 FLOAT, s288 FLOAT, s289 FLOAT, s290 FLOAT,
34 | s291 FLOAT, s292 FLOAT, s293 FLOAT, s294 FLOAT, s295 FLOAT, s296 FLOAT, s297 FLOAT, s298 FLOAT, s299 FLOAT, s300 FLOAT,
35 | s301 FLOAT, s302 FLOAT, s303 FLOAT, s304 FLOAT, s305 FLOAT, s306 FLOAT, s307 FLOAT, s308 FLOAT, s309 FLOAT, s310 FLOAT,
36 | s311 FLOAT, s312 FLOAT, s313 FLOAT, s314 FLOAT, s315 FLOAT, s316 FLOAT, s317 FLOAT, s318 FLOAT, s319 FLOAT, s320 FLOAT,
37 | s321 FLOAT, s322 FLOAT, s323 FLOAT, s324 FLOAT, s325 FLOAT, s326 FLOAT, s327 FLOAT, s328 FLOAT, s329 FLOAT, s330 FLOAT,
38 | s331 FLOAT, s332 FLOAT, s333 FLOAT, s334 FLOAT, s335 FLOAT, s336 FLOAT, s337 FLOAT, s338 FLOAT, s339 FLOAT, s340 FLOAT,
39 | s341 FLOAT, s342 FLOAT, s343 FLOAT, s344 FLOAT, s345 FLOAT, s346 FLOAT, s347 FLOAT, s348 FLOAT, s349 FLOAT, s350 FLOAT,
40 | s351 FLOAT, s352 FLOAT, s353 FLOAT, s354 FLOAT, s355 FLOAT, s356 FLOAT, s357 FLOAT, s358 FLOAT, s359 FLOAT, s360 FLOAT,
41 | s361 FLOAT, s362 FLOAT, s363 FLOAT, s364 FLOAT, s365 FLOAT, s366 FLOAT, s367 FLOAT, s368 FLOAT, s369 FLOAT, s370 FLOAT,
42 | s371 FLOAT, s372 FLOAT, s373 FLOAT, s374 FLOAT, s375 FLOAT, s376 FLOAT, s377 FLOAT, s378 FLOAT, s379 FLOAT, s380 FLOAT,
43 | s381 FLOAT, s382 FLOAT, s383 FLOAT, s384 FLOAT, s385 FLOAT, s386 FLOAT, s387 FLOAT, s388 FLOAT, s389 FLOAT, s390 FLOAT,
44 | s391 FLOAT, s392 FLOAT, s393 FLOAT, s394 FLOAT, s395 FLOAT, s396 FLOAT, s397 FLOAT, s398 FLOAT, s399 FLOAT, s400 FLOAT,
45 | s401 FLOAT, s402 FLOAT, s403 FLOAT, s404 FLOAT, s405 FLOAT, s406 FLOAT, s407 FLOAT, s408 FLOAT, s409 FLOAT, s410 FLOAT,
46 | s411 FLOAT, s412 FLOAT, s413 FLOAT, s414 FLOAT, s415 FLOAT, s416 FLOAT, s417 FLOAT, s418 FLOAT, s419 FLOAT, s420 FLOAT,
47 | s421 FLOAT, s422 FLOAT, s423 FLOAT, s424 FLOAT, s425 FLOAT, s426 FLOAT, s427 FLOAT, s428 FLOAT, s429 FLOAT, s430 FLOAT,
48 | s431 FLOAT, s432 FLOAT, s433 FLOAT, s434 FLOAT, s435 FLOAT, s436 FLOAT, s437 FLOAT, s438 FLOAT, s439 FLOAT, s440 FLOAT,
49 | s441 FLOAT, s442 FLOAT, s443 FLOAT, s444 FLOAT, s445 FLOAT, s446 FLOAT, s447 FLOAT, s448 FLOAT, s449 FLOAT, s450 FLOAT,
50 | s451 FLOAT, s452 FLOAT, s453 FLOAT, s454 FLOAT, s455 FLOAT, s456 FLOAT, s457 FLOAT, s458 FLOAT, s459 FLOAT, s460 FLOAT,
51 | s461 FLOAT, s462 FLOAT, s463 FLOAT, s464 FLOAT, s465 FLOAT, s466 FLOAT, s467 FLOAT, s468 FLOAT, s469 FLOAT, s470 FLOAT,
52 | s471 FLOAT, s472 FLOAT, s473 FLOAT, s474 FLOAT, s475 FLOAT, s476 FLOAT, s477 FLOAT, s478 FLOAT, s479 FLOAT, s480 FLOAT,
53 | s481 FLOAT, s482 FLOAT, s483 FLOAT, s484 FLOAT, s485 FLOAT, s486 FLOAT, s487 FLOAT, s488 FLOAT, s489 FLOAT, s490 FLOAT,
54 | s491 FLOAT, s492 FLOAT, s493 FLOAT, s494 FLOAT, s495 FLOAT, s496 FLOAT, s497 FLOAT, s498 FLOAT, s499 FLOAT, s500 FLOAT,
55 | s501 FLOAT, s502 FLOAT, s503 FLOAT, s504 FLOAT, s505 FLOAT, s506 FLOAT, s507 FLOAT, s508 FLOAT, s509 FLOAT, s510 FLOAT,
56 | s511 FLOAT, s512 FLOAT, s513 FLOAT, s514 FLOAT, s515 FLOAT, s516 FLOAT, s517 FLOAT, s518 FLOAT, s519 FLOAT, s520 FLOAT,
57 | s521 FLOAT, s522 FLOAT, s523 FLOAT, s524 FLOAT, s525 FLOAT, s526 FLOAT, s527 FLOAT, s528 FLOAT, s529 FLOAT, s530 FLOAT,
58 | s531 FLOAT, s532 FLOAT, s533 FLOAT, s534 FLOAT, s535 FLOAT, s536 FLOAT, s537 FLOAT, s538 FLOAT, s539 FLOAT, s540 FLOAT,
59 | s541 FLOAT, s542 FLOAT, s543 FLOAT, s544 FLOAT, s545 FLOAT, s546 FLOAT, s547 FLOAT, s548 FLOAT, s549 FLOAT, s550 FLOAT,
60 | s551 FLOAT, s552 FLOAT, s553 FLOAT, s554 FLOAT, s555 FLOAT, s556 FLOAT, s557 FLOAT, s558 FLOAT, s559 FLOAT, s560 FLOAT,
61 | s561 FLOAT, s562 FLOAT, s563 FLOAT, s564 FLOAT, s565 FLOAT, s566 FLOAT, s567 FLOAT, s568 FLOAT, s569 FLOAT, s570 FLOAT,
62 | s571 FLOAT, s572 FLOAT, s573 FLOAT, s574 FLOAT, s575 FLOAT, s576 FLOAT, s577 FLOAT, s578 FLOAT, s579 FLOAT, s580 FLOAT,
63 | s581 FLOAT, s582 FLOAT, s583 FLOAT, s584 FLOAT, s585 FLOAT, s586 FLOAT, s587 FLOAT, s588 FLOAT, s589 FLOAT
64 | );
65 |
66 | CREATE TABLE IF NOT EXISTS nan_count(
67 | pass INT,
68 | arrived_at DATETIME,
69 | count INT,
70 | alert_level INT
71 | );
72 |
--------------------------------------------------------------------------------
/smart-manufacturing/detect-anomaly/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.8.0)
2 | project (detect-anomaly)
3 | add_executable(main main.cpp)
4 |
5 | # RESTinio dependencies:
6 | # 1. ASIO or Boost::ASIO (goes as headers, vcpkg knows where)
7 | # 2. HTTP parser
8 | find_package(unofficial-http-parser REQUIRED)
9 | # 3. fmtlib
10 | find_package(fmt REQUIRED)
11 | # RESTinio itself
12 | find_package(restinio REQUIRED)
13 |
14 | target_link_libraries(main mysqlcppconn)
15 | target_link_libraries(main pthread)
16 | # Make your project dependent on restinio,
17 | # and let cmake deal with all the headers paths and linked libs.
18 | target_link_libraries(main restinio::restinio)
19 |
20 |
--------------------------------------------------------------------------------
/smart-manufacturing/detect-anomaly/Dockerfile:
--------------------------------------------------------------------------------
1 | # build binary
2 | FROM localhost/smart-manufacturing/build-env:latest AS build
3 |
4 | WORKDIR /opt
5 |
6 | COPY . .
7 |
 8 | RUN mkdir build && \
 9 |     cmake -B build/ -S . -DCMAKE_TOOLCHAIN_FILE=./vcpkg/scripts/buildsystems/vcpkg.cmake && \
10 |     cmake --build build/
11 |
12 |
13 | # copy binary to a runtime image
14 | FROM localhost/smart-manufacturing/runtime-env:latest
15 |
16 | WORKDIR /opt
17 |
 18 | COPY --from=build /opt/build/main .
19 |
20 | EXPOSE 8080
21 |
22 | ENTRYPOINT [ "/opt/main" ]
23 |
--------------------------------------------------------------------------------
/smart-manufacturing/detect-anomaly/function.hpp:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 | #include
4 | #include
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include "json.hpp"
11 |
12 | #include "mysql_connection.h"
13 |
14 | #include
15 | #include
16 | #include
17 | #include
18 |
19 | #define BASE_LEVEL 3
20 | using namespace std;
21 |
22 | // Tally the number of NaN and assign an alert level to a data sample.
23 | //
24 | // Sample input data:
25 | // {"time_arrive":"19/07/2008 11:55:00","pass":-1,"data":"3030.93 2564 2187.7333 1411.1265 1.3602 100 97.6133 0.1242 1.5005 0.0162 -0.0034 0.9455 202.4396 0 7.9558 414.871 10.0433 0.968 192.3963 12.519 1.4026 -5419 2916.5 -4043.75 751 0.8955 1.773 3.049 64.2333 2.0222 0.1632 3.5191 83.3971 9.5126 50.617 64.2588 49.383 66.3141 86.9555 117.5132 61.29 4.515 70 352.7173 10.1841 130.3691 723.3092 1.3072 141.2282 1 624.3145 218.3174 0 4.592 4.841 2834 0.9317 0.9484 4.7057 -1.7264 350.9264 10.6231 108.6427 16.1445 21.7264 29.5367 693.7724 0.9226 148.6009 1 608.17 84.0793 NaN NaN 0 0.0126 -0.0206 0.0141 -0.0307 -0.0083 -0.0026 -0.0567 -0.0044 7.2163 0.132 NaN 2.3895 0.969 1747.6049 0.1841 8671.9301 -0.3274 -0.0055 -0.0001 0.0001 0.0003 -0.2786 0 0.3974 -0.0251 0.0002 0.0002 0.135 -0.0042 0.0003 0.0056 0 -0.2468 0.3196 NaN NaN NaN NaN 0.946 0 748.6115 0.9908 58.4306 0.6002 0.9804 6.3788 15.88 2.639 15.94 15.93 0.8656 3.353 0.4098 3.188 -0.0473 0.7243 0.996 2.2967 1000.7263 39.2373 123 111.3 75.2 46.2 350.671 0.3948 0 6.78 0.0034 0.0898 0.085 0.0358 0.0328 12.2566 0 4.271 10.284 0.4734 0.0167 11.8901 0.41 0.0506 NaN NaN 1017 967 1066 368 0.09 0.048 0.095 2 0.9 0.069 0.046 0.725 0.1139 0.3183 0.5888 0.3184 0.9499 0.3979 0.16 0 0 20.95 0.333 12.49 16.713 0.0803 5.72 0 11.19 65.363 0 0 0 0 0 0 0.292 5.38 20.1 0.296 10.62 10.3 5.38 4.04 16.23 0.2951 8.64 0 10.3 97.314 0 0.0772 0.0599 0.07 0.0547 0.0704 0.052 0.0301 0.1135 3.4789 0.001 NaN 0.0707 0.0211 175.2173 0.0315 1940.3994 0 0.0744 0.0546 0 0 0 0 0 0 0 0 0 0.0027 0.004 0 0 0 0 NaN NaN NaN NaN 0.0188 0 219.9453 0.0011 2.8374 0.0189 0.005 0.4269 0 0 0 0 0 0 0 0 0 0 0 0.0472 40.855 4.5152 30.9815 33.9606 22.9057 15.9525 110.2144 0.131 0 2.5883 0.001 0.0319 0.0197 0.012 0.0109 3.9321 0 1.5123 3.5811 0.1337 0.0055 3.8447 0.1077 0.0167 NaN NaN 418.1363 398.3185 496.1582 158.333 0.0373 0.0202 0.0462 0.6083 0.3032 0.02 0.0174 0.2827 0.0434 0.1342 0.2419 0.1343 0.367 0.1431 0.061 0 0 0 6.2698 0.1181 3.8208 5.3737 0.0254 1.6252 0 3.2461 18.0118 0 0 0 0 0 0 0.0752 1.5989 6.5893 0.0913 3.0911 8.4654 1.5989 1.2293 5.3406 0.0867 2.8551 0 2.9971 31.8843 NaN NaN 0 0.0215 0.0274 0.0315 0.0238 0.0206 0.0238 0.0144 0.0491 1.2708 0.0004 NaN 0.0229 0.0065 55.2039 0.0105 560.2658 0 0.017 0.0148 0.0124 0.0114 0 0 0 0 0 0 0 0.001 0.0013 0 0 0 0 NaN NaN NaN NaN 0.0055 0 61.5932 0.0003 0.9967 0.0082 0.0017 0.1437 0 0 0 0 0 0 0 0 0 0 0 0.0151 14.2396 1.4392 5.6188 3.6721 2.9329 2.1118 24.8504 29.0271 0 6.9458 2.738 5.9846 525.0965 0 3.4641 6.0544 0 53.684 2.4788 4.7141 1.7275 6.18 3.275 3.6084 18.7673 33.1562 26.3617 49.0013 10.0503 2.7073 3.1158 3.1136 44.5055 42.2737 1.3071 0.8693 1.1975 0.6288 0.9163 0.6448 1.4324 0.4576 0.1362 0 0 0 5.9396 3.2698 9.5805 2.3106 6.1463 4.0502 0 1.7924 29.9394 0 0 0 0 0 0 6.2052 311.6377 5.7277 2.7864 9.7752 63.7987 24.7625 13.6778 2.3394 31.9893 5.8142 0 1.6936 115.7408 0 613.3069 291.4842 494.6996 178.1759 843.1138 0 53.1098 0 48.2091 0.7578 NaN 2.957 2.1739 10.0261 17.1202 22.3756 0 0 0 0 0 0 0 0 0 0 0 0 64.6707 0 0 0 0 0 NaN NaN NaN NaN 1.9864 0 29.3804 0.1094 4.856 3.1406 0.5064 6.6926 0 0 0 0 0 0 0 0 0 0 0 2.057 4.0825 11.5074 0.1096 0.0078 0.0026 7.116 1.0616 395.57 75.752 0.4234 12.93 0.78 0.1827 5.7349 0.3363 39.8842 3.2687 1.0297 1.0344 0.4385 0.1039 42.3877 NaN NaN NaN NaN NaN NaN NaN NaN 533.85 2.1113 8.95 0.3157 3.0624 0.1026 1.6765 14.9509 NaN NaN NaN NaN 0.5005 0.0118 0.0035 2.363 NaN NaN NaN NaN"}
26 |
27 | // split string by delimiter
28 | size_t split(const std::string &txt, std::vector<std::string> &strs, char ch)
29 | {
30 | size_t pos = txt.find( ch );
31 | size_t initialPos = 0;
32 | strs.clear();
33 |
34 | // Decompose statement
35 | while( pos != std::string::npos ) {
36 | strs.push_back( txt.substr( initialPos, pos - initialPos ) );
37 | initialPos = pos + 1;
38 |
39 | pos = txt.find( ch, initialPos );
40 | }
41 |
42 | // Add the last one
43 | strs.push_back( txt.substr( initialPos, std::min( pos, txt.size() ) - initialPos + 1 ) );
44 |
45 | return strs.size();
46 | }
47 |
48 | string date_convert(string date_str){
49 | int year, month, day, hour, minute, second;
50 | char buf[21];
51 | sscanf(date_str.c_str(), "%02d/%02d/%d %02d:%02d:%02d", &day, &month, &year, &hour, &minute, &second);
52 | sprintf(buf, "%d-%02d-%02d %02d:%02d:%02d", year, month, day, hour, minute, second);
53 | string format_date = buf;
54 | return format_date;
55 | }
56 |
57 | int simulate_compute(vector<float> data, float sum, int count){
58 | // Average, max, min
59 | float avg = sum / count, max_ele = *max_element(data.begin(), data.end()), min_ele = *min_element(data.begin(), data.end());
60 | bool flag = false;
61 |
62 | srand((unsigned)time(NULL));
63 | int threshold = rand() % int(max_ele);
64 | if (avg > threshold)
65 | flag = true;
66 | if (min_ele < avg - threshold)
67 | flag = true;
68 |
69 | // Just for computing
70 |     for (vector<float>::iterator it = data.begin(); it != data.end(); it++) {
71 | float temp = *it - threshold;
72 | }
73 |
74 | return flag ? 1 : 0;
75 | }
76 |
77 | string handle(string req) {
78 | auto data_json = nlohmann::json::parse(req);
79 |
80 | // Format arrive time
81 |     int pass = data_json["pass"].get<int>();
82 |     string format_date = date_convert(data_json["time_arrive"].get<string>());
83 |
84 | float sum = 0;
85 | int count = 0, nan_count = 0, level;
86 |     vector<float> data;
87 |     vector<string> v;
88 |     string s = "s";
89 |     string data_str = data_json["data"].get<string>();
90 | split(data_str, v, ' ');
91 | for (size_t i = 0; i < v.size(); ++i) {
92 | if(v[i] != "NaN") {
93 | float temp = stof(v[i]);
94 | data.push_back(temp);
95 | sum += temp;
96 | count += 1;
97 | }
98 | else{
99 | nan_count += 1;
100 | }
101 | }
102 |
103 | if (pass == 1) {
104 | if (nan_count > 8)
105 | level = BASE_LEVEL;
106 | else
107 | level = BASE_LEVEL + 1;
108 | } else {
109 | if (nan_count > 8)
110 | level = BASE_LEVEL + 2;
111 | else
112 | level = BASE_LEVEL + 3;
113 | }
114 |
115 | // Calculation Simulation
116 | level -= simulate_compute(data, sum, count);
117 |
118 | nlohmann::json output;
119 |
120 | try {
121 | sql::Driver *driver;
122 | sql::Connection *con;
123 | sql::Statement *stmt;
124 | sql::ResultSet *res;
125 |
126 | string addr (std::getenv("MYSQL_ADDRESS"));
127 | string username (std::getenv("MYSQL_USERNAME"));
128 | string password (std::getenv("MYSQL_PASSWORD"));
129 |
130 | /* Create a connection */
131 | driver = get_driver_instance();
132 | con = driver->connect(addr, username, password);
133 | /* Connect to the MySQL test database */
134 | con->setSchema("db");
135 |
136 | stmt = con->createStatement();
137 | // Concat SQL string
138 | string query1 = "SELECT * FROM nan_count ORDER BY arrived_at DESC LIMIT 2;";
139 |
140 | // execute the first query
141 | res = stmt -> executeQuery(query1);
142 |
143 |         vector<int> recent_pass, recent_nan_c, recent_level;
144 | while (res->next()) {
145 | recent_pass.push_back(res->getInt("pass"));
146 | recent_nan_c.push_back(res->getInt("count"));
147 | recent_level.push_back(res->getInt("alert_level"));
148 | }
149 |
150 | recent_pass.push_back(pass);
151 | recent_nan_c.push_back(nan_count);
152 | int recent_h_nan_counts = 0, recent_pass_counts = 0;
153 |         for(vector<int>::iterator it = recent_pass.begin(); it != recent_pass.end(); it++){
154 | if(*it == 1)
155 | recent_pass_counts += 1;
156 | }
157 |         for(vector<int>::iterator it = recent_nan_c.begin(); it != recent_nan_c.end(); it++){
158 | if(*it > 8)
159 | recent_h_nan_counts += 1;
160 | }
161 | if(recent_pass_counts > 1 || recent_h_nan_counts > 1)
162 | level -= 1;
163 |
164 | string query2 = "INSERT INTO nan_count(pass, arrived_at, count, alert_level) VALUES(" + to_string(pass) + ", \"" + format_date + "\", " + to_string(nan_count) + ", " + to_string(level) + ");";
165 | stmt = con -> createStatement();
166 | stmt -> execute(query2);
167 |
168 | output["time"] = format_date;
169 | output["pass"] = pass;
170 | output["nan_count"] = nan_count;
171 | output["alert_level"] = level;
172 |
173 | cout << output.dump() << endl;
174 |
175 |         delete res; delete stmt;
176 | delete con;
177 |
178 | return "ok";
179 | } catch (sql::SQLException &e) {
180 | cout << "# ERR: SQLException in " << __FILE__;
181 | cout << "(" << __FUNCTION__ << ") on line " << __LINE__ << endl;
182 | cout << "# ERR: " << e.what();
183 | cout << " (MySQL error code: " << e.getErrorCode();
184 | cout << ", SQLState: " << e.getSQLState() << " )" << endl;
185 |
186 | return "error";
187 | }
188 | }
189 |
--------------------------------------------------------------------------------
/smart-manufacturing/detect-anomaly/main.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>
2 |
3 | #include "mysql_connection.h"
4 | #include <cppconn/driver.h>
5 | #include <cppconn/exception.h>
6 | #include <cppconn/resultset.h>
7 | #include <cppconn/statement.h>
8 |
9 | #include <restinio/all.hpp>
10 |
11 | #include "function.hpp"
12 |
13 |
14 | template <typename RESP>
15 | RESP
16 | init_resp(RESP resp)
17 | {
18 | resp.append_header(restinio::http_field::server, "RESTinio sample server /v.0.2");
19 | resp.append_header_date_field();
20 |
21 | return resp;
22 | }
23 |
24 | using router_t = restinio::router::express_router_t<>;
25 |
26 | auto create_request_handler()
27 | {
28 | auto router = std::make_unique< router_t >();
29 |
30 | router->http_get(
31 | "/",
32 | [](auto req, auto){
33 | init_resp(req->create_response())
34 | .append_header(restinio::http_field::content_type, "text/plain; charset=utf-8")
35 | .set_body("not implemented")
36 | .done();
37 |
38 | return restinio::request_accepted();
39 | });
40 |
41 | router->non_matched_request_handler(
42 | [](auto req){
43 | string s;
44 | try {
45 | s = handle(req->body());
46 | } catch(exception &e) {
47 | s = string(e.what());
48 | }
49 | return
50 | init_resp(req->create_response())
51 | .set_body(s)
52 | .done();
53 |
54 | });
55 |
56 | return router;
57 | }
58 |
59 | int main()
60 | {
61 | using namespace std::chrono;
62 |
63 | try {
64 | using traits_t =
65 | restinio::traits_t<
66 | restinio::asio_timer_manager_t,
67 | restinio::single_threaded_ostream_logger_t,
68 | router_t >;
69 |
70 | restinio::run(
71 |             restinio::on_this_thread<traits_t>()
72 | .port(8080)
73 | .address("0.0.0.0")
74 | .request_handler(create_request_handler()));
75 | } catch(const std::exception & ex) {
76 | std::cerr << "Error: " << ex.what() << std::endl;
77 | return 1;
78 | }
79 |
80 | return 0;
81 | }
82 |
--------------------------------------------------------------------------------
/smart-manufacturing/detect-anomaly/template.yaml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | smart-manufacturing:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
7 | Policies:
8 | - AliyunContainerRegistryReadOnlyAccess
9 | - AliyunLogFullAccess
10 | InternetAccess: true
11 |
12 | detect-anomaly:
13 | Type: 'Aliyun::Serverless::Function'
14 | Properties:
15 | Runtime: custom-container
16 | Timeout: 60
17 | CAPort: 8080
18 | Handler: not-used
19 | MemorySize: 1024
20 | CodeUri: ./ # Root directory for the function or the Dockerfile path
21 | CustomContainerConfig:
22 |         Image: '<registry>/<image>:<tag>'
23 | Command: ''
24 | Args: ''
25 | EnvironmentVariables:
26 |           'MYSQL_ADDRESS': 'tcp://<host>:<port>'
27 | 'MYSQL_USERNAME': ''
28 | 'MYSQL_PASSWORD': ''
29 | Events:
30 | http-trigger:
31 | Type: HTTP
32 | Properties:
33 | AuthType: ANONYMOUS
34 | Methods: ['GET', 'POST', 'PUT']
35 |
--------------------------------------------------------------------------------
/smart-manufacturing/ingest-data/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.8.0)
2 | project (ingest-data)
3 | add_executable(main main.cpp)
4 |
5 | # RESTinio dependencies:
6 | # 1. ASIO or Boost::ASIO (goes as headers, vcpkg knows where)
7 | # 2. HTTP parser
8 | find_package(unofficial-http-parser REQUIRED)
9 | # 3. fmtlib
10 | find_package(fmt REQUIRED)
11 | # RESTinio itself
12 | find_package(restinio REQUIRED)
13 |
14 | target_link_libraries(main mysqlcppconn)
15 | target_link_libraries(main pthread)
16 | # Make your project dependent on restinio,
17 | # and let cmake deal with all the headers paths and linked libs.
18 | target_link_libraries(main restinio::restinio)
19 |
20 |
--------------------------------------------------------------------------------
/smart-manufacturing/ingest-data/Dockerfile:
--------------------------------------------------------------------------------
1 | # build binary
2 | FROM localhost/smart-manufacturing/build-env:latest AS build
3 |
4 | WORKDIR /opt
5 |
6 | COPY . .
7 |
8 | RUN mkdir build && \
9 | cmake -B build/ -S . -DCMAKE_TOOLCHAIN_FILE=./vcpkg/scripts/buildsystems/vcpkg.cmake && \
10 |     make -C build/
11 |
12 |
13 | # copy binary to a runtime image
14 | FROM localhost/smart-manufacturing/runtime-env:latest
15 |
16 | WORKDIR /opt
17 |
18 | COPY --from=build /opt/build/main .
19 |
20 | EXPOSE 8080
21 |
22 | ENTRYPOINT [ "/opt/main" ]
23 |
--------------------------------------------------------------------------------
/smart-manufacturing/ingest-data/function.hpp:
--------------------------------------------------------------------------------
1 | #include <string>
2 | #include <vector>
3 | #include <iostream>
4 | #include <sstream>
5 | #include <algorithm>
6 | #include <iterator>
7 | #include <random>
8 | #include <experimental/algorithm>
9 | #include <cstdio>
10 | #include <cstdlib>
11 | #include "json.hpp"
12 |
13 | #include "mysql_connection.h"
14 |
15 | #include <cppconn/driver.h>
16 | #include <cppconn/exception.h>
17 | #include <cppconn/resultset.h>
18 | #include <cppconn/statement.h>
19 |
20 | using namespace std;
21 |
22 | // Ingest a data sample and store it into database.
23 | //
24 | // Sample input data:
25 | // {"time_arrive":"19/07/2008 11:55:00","pass":-1,"data":"3030.93 2564 2187.7333 1411.1265 1.3602 100 97.6133 0.1242 1.5005 0.0162 -0.0034 0.9455 202.4396 0 7.9558 414.871 10.0433 0.968 192.3963 12.519 1.4026 -5419 2916.5 -4043.75 751 0.8955 1.773 3.049 64.2333 2.0222 0.1632 3.5191 83.3971 9.5126 50.617 64.2588 49.383 66.3141 86.9555 117.5132 61.29 4.515 70 352.7173 10.1841 130.3691 723.3092 1.3072 141.2282 1 624.3145 218.3174 0 4.592 4.841 2834 0.9317 0.9484 4.7057 -1.7264 350.9264 10.6231 108.6427 16.1445 21.7264 29.5367 693.7724 0.9226 148.6009 1 608.17 84.0793 NaN NaN 0 0.0126 -0.0206 0.0141 -0.0307 -0.0083 -0.0026 -0.0567 -0.0044 7.2163 0.132 NaN 2.3895 0.969 1747.6049 0.1841 8671.9301 -0.3274 -0.0055 -0.0001 0.0001 0.0003 -0.2786 0 0.3974 -0.0251 0.0002 0.0002 0.135 -0.0042 0.0003 0.0056 0 -0.2468 0.3196 NaN NaN NaN NaN 0.946 0 748.6115 0.9908 58.4306 0.6002 0.9804 6.3788 15.88 2.639 15.94 15.93 0.8656 3.353 0.4098 3.188 -0.0473 0.7243 0.996 2.2967 1000.7263 39.2373 123 111.3 75.2 46.2 350.671 0.3948 0 6.78 0.0034 0.0898 0.085 0.0358 0.0328 12.2566 0 4.271 10.284 0.4734 0.0167 11.8901 0.41 0.0506 NaN NaN 1017 967 1066 368 0.09 0.048 0.095 2 0.9 0.069 0.046 0.725 0.1139 0.3183 0.5888 0.3184 0.9499 0.3979 0.16 0 0 20.95 0.333 12.49 16.713 0.0803 5.72 0 11.19 65.363 0 0 0 0 0 0 0.292 5.38 20.1 0.296 10.62 10.3 5.38 4.04 16.23 0.2951 8.64 0 10.3 97.314 0 0.0772 0.0599 0.07 0.0547 0.0704 0.052 0.0301 0.1135 3.4789 0.001 NaN 0.0707 0.0211 175.2173 0.0315 1940.3994 0 0.0744 0.0546 0 0 0 0 0 0 0 0 0 0.0027 0.004 0 0 0 0 NaN NaN NaN NaN 0.0188 0 219.9453 0.0011 2.8374 0.0189 0.005 0.4269 0 0 0 0 0 0 0 0 0 0 0 0.0472 40.855 4.5152 30.9815 33.9606 22.9057 15.9525 110.2144 0.131 0 2.5883 0.001 0.0319 0.0197 0.012 0.0109 3.9321 0 1.5123 3.5811 0.1337 0.0055 3.8447 0.1077 0.0167 NaN NaN 418.1363 398.3185 496.1582 158.333 0.0373 0.0202 0.0462 0.6083 0.3032 0.02 0.0174 0.2827 0.0434 0.1342 0.2419 0.1343 0.367 0.1431 0.061 0 0 0 6.2698 0.1181 3.8208 5.3737 0.0254 1.6252 0 3.2461 18.0118 0 0 0 0 0 0 0.0752 1.5989 6.5893 0.0913 3.0911 8.4654 1.5989 1.2293 5.3406 0.0867 2.8551 0 2.9971 31.8843 NaN NaN 0 0.0215 0.0274 0.0315 0.0238 0.0206 0.0238 0.0144 0.0491 1.2708 0.0004 NaN 0.0229 0.0065 55.2039 0.0105 560.2658 0 0.017 0.0148 0.0124 0.0114 0 0 0 0 0 0 0 0.001 0.0013 0 0 0 0 NaN NaN NaN NaN 0.0055 0 61.5932 0.0003 0.9967 0.0082 0.0017 0.1437 0 0 0 0 0 0 0 0 0 0 0 0.0151 14.2396 1.4392 5.6188 3.6721 2.9329 2.1118 24.8504 29.0271 0 6.9458 2.738 5.9846 525.0965 0 3.4641 6.0544 0 53.684 2.4788 4.7141 1.7275 6.18 3.275 3.6084 18.7673 33.1562 26.3617 49.0013 10.0503 2.7073 3.1158 3.1136 44.5055 42.2737 1.3071 0.8693 1.1975 0.6288 0.9163 0.6448 1.4324 0.4576 0.1362 0 0 0 5.9396 3.2698 9.5805 2.3106 6.1463 4.0502 0 1.7924 29.9394 0 0 0 0 0 0 6.2052 311.6377 5.7277 2.7864 9.7752 63.7987 24.7625 13.6778 2.3394 31.9893 5.8142 0 1.6936 115.7408 0 613.3069 291.4842 494.6996 178.1759 843.1138 0 53.1098 0 48.2091 0.7578 NaN 2.957 2.1739 10.0261 17.1202 22.3756 0 0 0 0 0 0 0 0 0 0 0 0 64.6707 0 0 0 0 0 NaN NaN NaN NaN 1.9864 0 29.3804 0.1094 4.856 3.1406 0.5064 6.6926 0 0 0 0 0 0 0 0 0 0 0 2.057 4.0825 11.5074 0.1096 0.0078 0.0026 7.116 1.0616 395.57 75.752 0.4234 12.93 0.78 0.1827 5.7349 0.3363 39.8842 3.2687 1.0297 1.0344 0.4385 0.1039 42.3877 NaN NaN NaN NaN NaN NaN NaN NaN 533.85 2.1113 8.95 0.3157 3.0624 0.1026 1.6765 14.9509 NaN NaN NaN NaN 0.5005 0.0118 0.0035 2.363 NaN NaN NaN NaN"}
26 |
27 | // split string by delimiter
28 | size_t split(const std::string &txt, std::vector<std::string> &strs, char ch)
29 | {
30 | size_t pos = txt.find( ch );
31 | size_t initialPos = 0;
32 | strs.clear();
33 |
34 | // Decompose statement
35 | while( pos != std::string::npos ) {
36 | strs.push_back( txt.substr( initialPos, pos - initialPos ) );
37 | initialPos = pos + 1;
38 |
39 | pos = txt.find( ch, initialPos );
40 | }
41 |
42 | // Add the last one
43 | strs.push_back( txt.substr( initialPos, std::min( pos, txt.size() ) - initialPos + 1 ) );
44 |
45 | return strs.size();
46 | }
47 |
48 | string date_convert(string date_str){
49 | int year, month, day, hour, minute, second;
50 | char buf[21];
51 | sscanf(date_str.c_str(), "%02d/%02d/%d %02d:%02d:%02d", &day, &month, &year, &hour, &minute, &second);
52 | sprintf(buf, "%d-%02d-%02d %02d:%02d:%02d", year, month, day, hour, minute, second);
53 | string format_date = buf;
54 | return format_date;
55 | }
56 |
57 | void simulate_compute(vector<float> data, float sum, int count){
58 | // Average, max, min
59 | float avg = sum / count, max_ele = *max_element(data.begin(), data.end()), min_ele = *min_element(data.begin(), data.end());
60 | // Normalized
61 |     vector<float> normalized(count);
62 | for(int i = 0; i < count; i++){
63 | normalized[i] = data[i] / sum;
64 | }
65 |
66 | // scale random elements, scale
67 | int num = rand() % count;
68 |     vector<float> sam;
69 | std::experimental::sample(data.begin(), data.end(), std::back_inserter(sam), num, std::mt19937{std::random_device{}()});
70 | for(int i = 0; i < num; i++){
71 | sam[i] *= 1.5;
72 | }
73 |
74 | // simulate conversion
75 | int choose = rand() % count;
76 | float t = ((data[choose] - 600) / 1000 + 23.5) * 1.1125;
77 | choose = rand() % count;
78 | t = (data[choose] / 5.535 + 12.696) * 3.333;
79 | }
80 |
81 | string gen_sql_str(int pass, string format_date, vector<string> col2add, vector<float> data){
82 | // Required std::stringstream object
83 | stringstream ss;
84 |
85 | // Convert all but the last element to avoid a trailing ","
86 |     copy(col2add.begin(), col2add.end() - 1, ostream_iterator<string>(ss, ", "));
87 | // Now add the last element with no delimiter
88 | ss << col2add.back();
89 |
90 | string query = "INSERT INTO sensor_data(pass, arrived_at, " + ss.str() + ") VALUES(" + to_string(pass) + ", \"" + format_date + "\", ";
91 | ss.clear();
92 | ss.str("");
93 |     copy(data.begin(), data.end() - 1, ostream_iterator<float>(ss, ", "));
94 | // Now add the last element with no delimiter
95 | ss << data.back();
96 | query += ss.str() + ");";
97 |
98 | return query;
99 | }
100 |
101 | string handle(string body) {
102 | auto data_json = nlohmann::json::parse(body);
103 |
104 | // Format arrive time
105 |     int pass = data_json["pass"].get<int>();
106 |     string format_date = date_convert(data_json["time_arrive"].get<string>());
107 |
108 | float sum = 0; int count = 0;
109 |     vector<float> data;
110 |     vector<string> col2add;
111 |     vector<string> v;
112 |     string s = "s";
113 |     string data_str = data_json["data"].get<string>();
114 | split(data_str, v, ' ');
115 | for(size_t i = 0; i < v.size(); ++i) {
116 | if(v[i] != "NaN"){
117 | col2add.push_back(s + to_string(i));
118 | float temp = stof(v[i]);
119 | data.push_back(temp);
120 | sum += temp;
121 | count += 1;
122 | }
123 | }
124 |
125 | // Calculation Simulation
126 | simulate_compute(data, sum, count);
127 |
128 | // Concat SQL string
129 | string query = gen_sql_str(pass, format_date, col2add, data);
130 |
131 | try {
132 | sql::Driver *driver;
133 | sql::Connection *con;
134 | sql::Statement *stmt;
135 |
136 | string addr (std::getenv("MYSQL_ADDRESS"));
137 | string username (std::getenv("MYSQL_USERNAME"));
138 | string password (std::getenv("MYSQL_PASSWORD"));
139 |
140 | /* Create a connection */
141 | driver = get_driver_instance();
142 | con = driver->connect(addr, username, password);
143 | /* Connect to the MySQL test database */
144 | con->setSchema("db");
145 |
146 | stmt = con->createStatement();
147 | stmt->execute(query);
148 |
149 | // delete res;
150 | delete stmt;
151 | delete con;
152 | return "ok";
153 | } catch (sql::SQLException &e) {
154 | cout << "# ERR: SQLException in " << __FILE__;
155 | cout << "(" << __FUNCTION__ << ") on line " << __LINE__ << endl;
156 | cout << "# ERR: " << e.what();
157 | cout << " (MySQL error code: " << e.getErrorCode();
158 | cout << ", SQLState: " << e.getSQLState() << " )" << endl;
159 | return "error";
160 | }
161 | }
162 |
--------------------------------------------------------------------------------
/smart-manufacturing/ingest-data/main.cpp:
--------------------------------------------------------------------------------
1 | #include <iostream>
2 |
3 | #include "mysql_connection.h"
4 | #include <cppconn/driver.h>
5 | #include <cppconn/exception.h>
6 | #include <cppconn/resultset.h>
7 | #include <cppconn/statement.h>
8 |
9 | #include <restinio/all.hpp>
10 |
11 | #include "function.hpp"
12 |
13 |
14 | template <typename RESP>
15 | RESP
16 | init_resp(RESP resp)
17 | {
18 | resp.append_header(restinio::http_field::server, "RESTinio sample server /v.0.2");
19 | resp.append_header_date_field();
20 |
21 | return resp;
22 | }
23 |
24 | using router_t = restinio::router::express_router_t<>;
25 |
26 | auto create_request_handler()
27 | {
28 | auto router = std::make_unique< router_t >();
29 |
30 | router->http_get(
31 | "/",
32 | [](auto req, auto){
33 | init_resp(req->create_response())
34 | .append_header(restinio::http_field::content_type, "text/plain; charset=utf-8")
35 | .set_body("not implemented")
36 | .done();
37 |
38 | return restinio::request_accepted();
39 | });
40 |
41 | router->non_matched_request_handler(
42 | [](auto req){
43 | string s;
44 | try {
45 | s = handle(req->body());
46 | } catch(exception &e) {
47 | s = string(e.what());
48 | }
49 | return
50 | init_resp(req->create_response())
51 | .set_body(s)
52 | .done();
53 |
54 | });
55 |
56 | return router;
57 | }
58 |
59 | int main()
60 | {
61 | using namespace std::chrono;
62 |
63 | try {
64 | using traits_t =
65 | restinio::traits_t<
66 | restinio::asio_timer_manager_t,
67 | restinio::single_threaded_ostream_logger_t,
68 | router_t >;
69 |
70 | restinio::run(
71 |             restinio::on_this_thread<traits_t>()
72 | .port(8080)
73 | .address("0.0.0.0")
74 | .request_handler(create_request_handler()));
75 | } catch(const std::exception & ex) {
76 | std::cerr << "Error: " << ex.what() << std::endl;
77 | return 1;
78 | }
79 |
80 | return 0;
81 | }
82 |
--------------------------------------------------------------------------------
/smart-manufacturing/ingest-data/template.yaml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | smart-manufacturing:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
7 | Policies:
8 | - AliyunContainerRegistryReadOnlyAccess
9 | - AliyunLogFullAccess
10 | InternetAccess: true
11 |
12 | ingest-data:
13 | Type: 'Aliyun::Serverless::Function'
14 | Properties:
15 | Runtime: custom-container
16 | Timeout: 60
17 | CAPort: 8080
18 | Handler: not-used
19 | MemorySize: 1024
20 | CodeUri: ./ # Root directory for the function or the Dockerfile path
21 | CustomContainerConfig:
22 |         Image: '<registry>/<image>:<tag>'
23 | Command: ''
24 | Args: ''
25 | EnvironmentVariables:
26 |           'MYSQL_ADDRESS': 'tcp://<host>:<port>'
27 | 'MYSQL_USERNAME': ''
28 | 'MYSQL_PASSWORD': ''
29 | Events:
30 | http-trigger:
31 | Type: HTTP
32 | Properties:
33 | AuthType: ANONYMOUS
34 | Methods: ['GET', 'POST', 'PUT']
35 |
--------------------------------------------------------------------------------
/smart-manufacturing/runtime.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:bionic
2 |
3 | RUN apt-get update -y && \
4 | apt-get install -yq libmysqlcppconn7v5 libboost-all-dev
5 |
--------------------------------------------------------------------------------
/smart-parking/README.md:
--------------------------------------------------------------------------------
1 |
2 | ## Smart Parking Application
3 |
4 | This application manages smart parking sites and serves as the backend of a mobile app for drivers.
5 | - The [Query Vacancy](#query-vacancy-function) function queries the number of vacant spots at a parking site.
6 | - The [Reserve Spot](#reserve-spot-function) function reserves a specific parking spot.
7 |
8 | ### Prerequisites
9 |
10 | Both functions require a running [Redis](https://redis.io/) instance, and the Reserve Spot function additionally needs [Kafka](https://kafka.apache.org/).
11 | - After deploying Redis, edit the `REDIS_URL` entry in both `{query-vacancy,reserve-spot}/template.yml`. Then set the key `vacancy` to an initial count, e.g. `10` (see the initialization sketch at the end of this README).
12 | - After deploying Kafka, create a topic with the name of your choice. Then edit the `KAFKA_BROKER` and `KAFKA_TOPIC` entries in `reserve-spot/template.yml` (the message format published to the topic is sketched at the end of this README).
13 |
14 | ### Query Vacancy Function
15 |
16 | To deploy the function,
17 |
18 | ```
19 | cd faas-scheduling-benchmark/smart-parking/query-vacancy/code
20 | npm i # install node packages
21 |
22 | cd ..
23 | fun deploy -y
24 | ```
25 |
26 | To invoke the function,
27 |
28 | ```
29 | # The <endpoint> can be found in the result printed out by `fun deploy`.
30 | curl -s -i http://<endpoint>/2016-08-15/proxy/smart-parking/query-vacancy/
31 | ```
32 |
33 | ### Reserve Spot Function
34 |
35 | To deploy the function,
36 |
37 | ```
38 | cd faas-scheduling-benchmark/smart-parking/reserve-spot/code
39 | npm i
40 |
41 | cd ..
42 | fun deploy -y
43 | ```
44 |
45 | To invoke the function,
46 |
47 | ```
48 | curl -s -i http://<endpoint>/2016-08-15/proxy/smart-parking/reserve-spot/ \
49 | -X POST \
50 | -d '{"spot-id": "1234", "user-id": "5678", "site-id": "1234" }' \
51 | -H 'Content-Type: application/json'
52 | ```
53 |
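54 | ### Initializing the Vacancy Counter
55 |
56 | A minimal sketch for seeding the `vacancy` key mentioned in the prerequisites, using the same `redis` package the functions already depend on. The connection URL, password, and host below are placeholders, not values from this repository; the initial count of `10` matches the example above.
57 |
58 | ```
59 | 'use strict'
60 |
61 | const redis = require('redis')
62 |
63 | // Seed the counter that query-vacancy reads and reserve-spot decrements.
64 | // Replace the placeholder URL with your own Redis deployment.
65 | const client = redis.createClient({ url: 'redis://:<password>@<host>:6379' })
66 |
67 | client.set('vacancy', '10', (err, reply) => {
68 |   if (err) console.error('Failed to set vacancy: ' + err)
69 |   else console.log('vacancy initialized: ' + reply) // prints "OK"
70 |   client.quit() // remember to close client
71 | })
72 | ```
73 |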
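74 | ### Reservation Message Format
75 |
76 | For each successful reservation, `reserve-spot/code/handler.js` publishes a JSON record of the form `{"user": ..., "time": ..., "operation": {"name": "RESERVE", "site": ..., "spot": ...}}` to the configured Kafka topic. The sketch below consumes and prints these records with the same `kafkajs` package; the broker address, topic, client id, and consumer group are illustrative assumptions to be replaced with your own settings.
77 |
78 | ```
79 | 'use strict'
80 |
81 | const { Kafka } = require('kafkajs')
82 |
83 | // Placeholder broker, topic, client id, and group id -- edit to match your deployment.
84 | const kafka = new Kafka({ clientId: 'reservation-consumer', brokers: ['<broker-host>:9092'] })
85 | const consumer = kafka.consumer({ groupId: 'reservation-log' })
86 |
87 | const run = async () => {
88 |   await consumer.connect()
89 |   await consumer.subscribe({ topic: '<topic-name>', fromBeginning: true })
90 |   await consumer.run({
91 |     eachMessage: async ({ message }) => {
92 |       // Each record is the JSON built in reserve-spot/code/handler.js:
93 |       // { user, time, operation: { name: 'RESERVE', site, spot } }
94 |       console.log(JSON.parse(message.value.toString()))
95 |     },
96 |   })
97 | }
98 |
99 | run().catch(console.error)
100 | ```
101 |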
--------------------------------------------------------------------------------
/smart-parking/query-vacancy/code/handler.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const { promisify } = require('util')
4 |
5 | module.exports = async (context, callback) => {
6 | const redis = require('redis')
7 | const url = process.env.REDIS_URL
8 | try {
9 | const client = redis.createClient({ url })
10 | const getAsync = promisify(client.get).bind(client)
11 | const res = await getAsync('vacancy')
12 | client.quit() // remember to close client
13 | return 'Current vacancy is ' + (res || 0) + '\n'
14 | } catch (e) {
15 | return 'Error querying vacancy: ' + e + '\n'
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/smart-parking/query-vacancy/code/index.js:
--------------------------------------------------------------------------------
1 | const getRawBody = require('raw-body');
2 | const handle = require('./handler')
3 |
4 | module.exports.handler = function(req, resp, context) {
5 |
6 | const params = {
7 | path: req.path,
8 | queries: req.queries,
9 | headers: req.headers,
10 | method : req.method,
11 | requestURI : req.url,
12 | clientIP : req.clientIP,
13 | }
14 |
15 | getRawBody(req, function(err, body) {
16 | handle(null, null).then((res) => {
17 | resp.send(res)
18 | }).catch((err) => {
19 | resp.send(JSON.stringify(err))
20 | })
21 | })
22 |
23 | };
24 |
25 |
--------------------------------------------------------------------------------
/smart-parking/query-vacancy/code/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "function",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "handler.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "keywords": [],
10 | "author": "",
11 | "license": "ISC",
12 | "dependencies": {
13 | "redis": "^3.0.2",
14 | "raw-body": "2.4.1"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/smart-parking/query-vacancy/template.yml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | smart-parking:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
7 | Description: 'Query the vacancy count stored in Redis.'
8 | Policies:
9 | - AliyunContainerRegistryReadOnlyAccess
10 | - AliyunLogFullAccess
11 | InternetAccess: true
12 |
13 | query-vacancy:
14 | Type: 'Aliyun::Serverless::Function'
15 | Properties:
16 | Handler: index.handler
17 | Runtime: nodejs10
18 | CodeUri: './code'
19 | MemorySize: 256
20 | EnvironmentVariables:
21 |           REDIS_URL: 'redis://:<password>@<host>:6379'
22 | Events:
23 | http-trigger:
24 | Type: HTTP
25 | Properties:
26 | AuthType: ANONYMOUS
27 | Methods: ['POST', 'GET']
28 |
--------------------------------------------------------------------------------
/smart-parking/reserve-spot/code/handler.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const { promisify } = require('util')
4 |
5 | module.exports = async (params, context, callback) => {
6 |
7 | try {
8 | const redis = require('redis')
9 | const url = process.env.REDIS_URL
10 | const client = redis.createClient({ url })
11 | const asyncDecr = promisify(client.decr).bind(client)
12 | const vacancy = await asyncDecr('vacancy')
13 | client.quit() // remember to close client
14 | } catch (e) {
15 | return 'Error operating Redis: ' + e
16 | }
17 |
18 | try {
19 | const { Kafka } = require('kafkajs')
20 | const producer = new Kafka({
21 | clientId: 'reserve-spot-client',
22 | brokers: [ process.env.KAFKA_BROKER ],
23 | ssl: false,
24 | sasl: null,
25 | }).producer()
26 | await producer.connect()
27 |
28 | const message = JSON.stringify({
29 | user: params.json['user-id'],
30 | time: Date.now(),
31 | operation: {
32 | name: 'RESERVE',
33 | site: params.json['site-id'],
34 | spot: params.json['spot-id'],
35 | }
36 | })
37 | await producer.send({
38 | topic: process.env.KAFKA_TOPIC,
39 | messages: [ { value: message } ],
40 | })
41 | } catch (e) {
42 | return 'Error operating Kafka: ' + e
43 | }
44 |
45 | return 'Reservation succeeded.'
46 | }
47 |
--------------------------------------------------------------------------------
/smart-parking/reserve-spot/code/index.js:
--------------------------------------------------------------------------------
1 | const getRawBody = require('raw-body');
2 | const handle = require('./handler')
3 |
4 | /*
5 | if you open the initializer feature, please implement the initializer function, as below:
6 | module.exports.initializer = function(context, callback) {
7 | console.log('initializing');
8 | callback(null, '');
9 | };
10 | */
11 |
12 | module.exports.handler = function(req, resp, context) {
13 |
14 | getRawBody(req, function(err, body) {
15 | const params = {
16 | path: req.path,
17 | queries: req.queries,
18 | headers: req.headers,
19 | method : req.method,
20 | requestURI : req.url,
21 | clientIP : req.clientIP,
22 | json: JSON.parse(body.toString()),
23 | }
24 |
25 | handle(params, null, null).then((res) => {
26 | resp.send(res)
27 | }).catch((err) => {
28 | resp.send(JSON.stringify(err))
29 | })
30 | })
31 |
32 | };
33 |
34 |
--------------------------------------------------------------------------------
/smart-parking/reserve-spot/code/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "function",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "handler.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "keywords": [],
10 | "author": "",
11 | "license": "ISC",
12 | "dependencies": {
13 | "kafkajs": "^1.15.0",
14 | "raw-body": "2.4.1",
15 | "redis": "^3.0.2"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/smart-parking/reserve-spot/template.yml:
--------------------------------------------------------------------------------
1 | ROSTemplateFormatVersion: '2015-09-01'
2 | Transform: 'Aliyun::Serverless-2018-04-03'
3 | Resources:
4 | smart-parking:
5 | Type: 'Aliyun::Serverless::Service'
6 | Properties:
7 | Description: 'Reserve a parking spot'
8 | Policies:
9 | - AliyunContainerRegistryReadOnlyAccess
10 | - AliyunLogFullAccess
11 | InternetAccess: true
12 |
13 | reserve-spot:
14 | Type: 'Aliyun::Serverless::Function'
15 | Properties:
16 | Handler: index.handler
17 | Runtime: nodejs10
18 | CodeUri: './code'
19 | MemorySize: 256
20 | EnvironmentVariables:
21 |           REDIS_URL: 'redis://:<password>@<host>:6379'
22 | KAFKA_BROKER: ''
23 | KAFKA_TOPIC: ''
24 | Events:
25 | http-trigger:
26 | Type: HTTP
27 | Properties:
28 | AuthType: ANONYMOUS
29 | Methods: ['POST', 'GET']
30 |
--------------------------------------------------------------------------------