├── .gitignore ├── LICENSE.md ├── README.md ├── analytics ├── README.md ├── demo-RTM-loco │ ├── README.md │ ├── demo-RTM-loco-java │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ ├── main │ │ │ ├── java │ │ │ │ └── com │ │ │ │ │ └── ge │ │ │ │ │ └── predix │ │ │ │ │ └── analytics │ │ │ │ │ ├── customdto │ │ │ │ │ ├── LocoRegressionRequest.java │ │ │ │ │ ├── LocoRegressionResponse.java │ │ │ │ │ └── WindLocoRTMValues.java │ │ │ │ │ └── demo │ │ │ │ │ └── java │ │ │ │ │ └── DemoRTMLocoJavaEntryPoint.java │ │ │ └── resources │ │ │ │ ├── config.json │ │ │ │ └── log4j.properties │ │ │ └── test │ │ │ ├── java │ │ │ └── com │ │ │ │ └── ge │ │ │ │ └── predix │ │ │ │ └── analytics │ │ │ │ └── demo │ │ │ │ └── java │ │ │ │ └── DemoRTMLocoTest.java │ │ │ └── resources │ │ │ └── LocoData.json │ ├── demo-RTM-loco-matlab-r2011b │ │ ├── README.md │ │ ├── data │ │ │ ├── LocoData.json │ │ │ └── locomotive │ │ │ │ └── LocomotiveRegression.m │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── ge │ │ │ │ └── predix │ │ │ │ └── insight │ │ │ │ └── analytic │ │ │ │ └── demo │ │ │ │ └── matlab │ │ │ │ └── LocoRegDemo.java │ │ │ └── resources │ │ │ ├── config.json │ │ │ ├── lib │ │ │ └── LocoRegression.jar │ │ │ ├── log4j.properties │ │ │ └── messagesource │ │ │ └── DemoAdderErrorMessages.properties │ ├── demo-RTM-loco-py │ │ ├── README.md │ │ ├── config.json │ │ └── rtm │ │ │ ├── RTMLocomotiveRegression.py │ │ │ └── __init__.py │ ├── demo-RTM-loco-template.json │ ├── sampleInput-small.json │ ├── sampleInput.json │ ├── sampleOutput-small.json │ └── sampleOutput.json ├── demo-adder │ ├── README.md │ ├── demo-adder-java │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── ge │ │ │ │ └── predix │ │ │ │ └── analytics │ │ │ │ ├── customdto │ │ │ │ └── AdderResponse.java │ │ │ │ └── demo │ │ │ │ └── java │ │ │ │ └── DemoAdderJavaEntryPoint.java │ │ │ └── resources │ │ │ ├── config.json │ │ │ └── 
log4j.properties │ ├── demo-adder-matlab-r2011b │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── ge │ │ │ │ └── predix │ │ │ │ └── insight │ │ │ │ └── analytic │ │ │ │ └── demo │ │ │ │ └── matlab │ │ │ │ └── DemoMatlabAdderEntryPoint.java │ │ │ └── resources │ │ │ ├── config.json │ │ │ ├── lib │ │ │ └── matlabadder.jar │ │ │ ├── log4j.properties │ │ │ └── messagesource │ │ │ └── DemoAdderErrorMessages.properties │ ├── demo-adder-py │ │ ├── README.md │ │ ├── analytic │ │ │ ├── __init__.py │ │ │ └── demoAdder.py │ │ ├── config.json │ │ └── testbench │ │ │ └── testDemoAdder.py │ └── demo-adder-template.json ├── demo-timeseries-adder-java │ ├── README.md │ ├── demo-timeseries-adder-template.json │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── ge │ │ │ │ └── predix │ │ │ │ └── analytics │ │ │ │ ├── customdto │ │ │ │ ├── AdderResponse.java │ │ │ │ ├── Data.java │ │ │ │ └── TimeseriesOutput.java │ │ │ │ └── demo │ │ │ │ └── java │ │ │ │ └── DemoJavaAdderWithTimeseriesEntryPoint.java │ │ └── resources │ │ │ ├── analyticTemplate.json │ │ │ ├── config.json │ │ │ └── log4j.properties │ │ └── test │ │ ├── java │ │ └── com │ │ │ └── ge │ │ │ └── predix │ │ │ └── analytics │ │ │ └── demo │ │ │ └── java │ │ │ └── TestDemoJavaAdderWithTimeseries.java │ │ └── resources │ │ ├── analyticInputDataWithTimeseriesData.json │ │ └── analyticOutputDataWithTimeseriesData.json ├── demo-timeseries-adder-with-model │ ├── README.md │ ├── demo-timeseries-adder-with-model-java │ │ ├── README.md │ │ ├── pom.xml │ │ └── src │ │ │ ├── main │ │ │ ├── java │ │ │ │ └── com │ │ │ │ │ └── ge │ │ │ │ │ └── predix │ │ │ │ │ └── analytics │ │ │ │ │ ├── customdto │ │ │ │ │ ├── AdderResponse.java │ │ │ │ │ ├── Data.java │ │ │ │ │ └── TimeseriesOutput.java │ │ │ │ │ └── demo │ │ │ │ │ └── java │ │ │ │ │ └── DemoJavaAdderWithTimeseriesEntryPoint.java │ │ │ └── resources │ │ │ │ ├── analyticTemplate.json │ │ │ │ ├── config.json │ │ │ 
│ └── log4j.properties │ │ │ └── test │ │ │ ├── java │ │ │ └── com │ │ │ │ └── ge │ │ │ │ └── predix │ │ │ │ └── analytics │ │ │ │ └── demo │ │ │ │ └── java │ │ │ │ └── TestDemoJavaAdderWithTimeseries.java │ │ │ └── resources │ │ │ ├── analyticInputDataWithTimeseriesData.json │ │ │ ├── analyticOutputDataWithTimeseriesData.json │ │ │ └── sampleModel.txt │ ├── demo-timeseries-adder-with-model-py │ │ ├── README.md │ │ ├── analytics │ │ │ ├── __init__.py │ │ │ └── demoTimeSeriesAdder.py │ │ ├── config.json │ │ └── testbench │ │ │ ├── data │ │ │ ├── inputData │ │ │ │ └── analyticInput.json │ │ │ ├── outputData │ │ │ │ ├── expectedAnalyticOutput.json │ │ │ │ └── expectedAnalyticOutputForThresholdOf200.json │ │ │ └── sampleModel.txt │ │ │ └── demoTimeSeriesAdderUnitTestBench.py │ ├── demo-timeseries-adder-with-model-template.json │ └── sampleModel.txt ├── miners-rule │ ├── pom.xml │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── ge │ │ │ │ └── predix │ │ │ │ └── analytics │ │ │ │ ├── customdto │ │ │ │ ├── CDM_TimeSeries.java │ │ │ │ ├── Input.java │ │ │ │ ├── Response.java │ │ │ │ ├── StressesTimeSeries.java │ │ │ │ └── TimeSeries.java │ │ │ │ └── demo │ │ │ │ └── java │ │ │ │ └── MinersRule.java │ │ └── resources │ │ │ ├── analyticTemplate.json │ │ │ ├── config.json │ │ │ └── log4j.properties │ │ └── test │ │ ├── java │ │ └── com │ │ │ └── ge │ │ │ └── predix │ │ │ └── analytics │ │ │ └── demo │ │ │ └── java │ │ │ └── TestMinersRule.java │ │ └── resources │ │ ├── minersRuleIn.json │ │ ├── minersRuleModel.txt │ │ ├── minersRuleOut.json │ │ └── sampleOrchestration │ │ ├── MinersRuleOrchestration.bpmn20.xml │ │ ├── MinersRulePortToFieldMap.json │ │ ├── minersRuleModelForAsset1.txt │ │ └── minersRuleModelForAssetGroup1.txt └── simple-linear-regression │ ├── README.MD │ ├── analytics │ ├── __init__.py │ ├── simple_linear_regression.py │ └── test_simple_linear_regression.py │ ├── assembly.xml │ ├── config.json │ ├── pom.xml │ └── 
simple_linear_regression_template.json ├── custom-data-connector ├── README.md └── postgresdb-ref-impl-data-connector │ ├── README.md │ ├── manifest.yml │ ├── pom.xml │ └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── ge │ │ │ └── predix │ │ │ ├── analytics │ │ │ ├── customconnector │ │ │ │ └── refimpl │ │ │ │ │ └── postgresdb │ │ │ │ │ ├── ApiV1AnalyticsCustomdataResource.java │ │ │ │ │ ├── CustomDataResourceManager.java │ │ │ │ │ ├── DataSourceConfig.java │ │ │ │ │ ├── PostgresRefImplConnectorApplication.java │ │ │ │ │ ├── SQLBuilder.java │ │ │ │ │ ├── SQLBuilderBase.java │ │ │ │ │ ├── SQLBuilderForGenericSchema.java │ │ │ │ │ └── SQLBuilderForNormalizedSchema.java │ │ │ └── security │ │ │ │ └── CsrfSecurityRequestMatcher.java │ │ │ └── insight │ │ │ └── dto │ │ │ ├── customdataprovider │ │ │ ├── AnalyticReadDataRequest.java │ │ │ ├── AnalyticReadDataResponse.java │ │ │ ├── AnalyticWriteDataRequest.java │ │ │ ├── AnalyticWriteDataResponse.java │ │ │ ├── DataRequest.java │ │ │ ├── DataResponse.java │ │ │ ├── Field.java │ │ │ ├── ObjectFactory.java │ │ │ ├── OrchestrationExecutionContext.java │ │ │ └── package-info.java │ │ │ └── errorresponse │ │ │ ├── ErrorResponse.java │ │ │ ├── ObjectFactory.java │ │ │ └── package-info.java │ └── resources │ │ ├── META-INF │ │ └── authentication-context.xml │ │ ├── custom-connector-application.properties │ │ ├── db │ │ └── migration │ │ │ └── schema │ │ │ ├── V0_1_0__turbine_db_schema.sql │ │ │ ├── V0_2_0__turbine_db_load_data.sql │ │ │ ├── V0_3_0__generic_sensor_schema.sql │ │ │ └── V0_4_0__generic_sensor_sample_data.sql │ │ ├── logback.xml │ │ └── wadl │ │ └── CustomDataProviderServiceImpl.wadl │ └── test │ ├── java │ └── com │ │ └── ge │ │ └── predix │ │ └── analytics │ │ └── customconnector │ │ └── refimpl │ │ └── postgresdb │ │ └── ApiV1AnalyticsCustomdataResourceTest.java │ └── resources │ ├── analytic-read-data-request.json │ ├── custom-connector-application.properties │ ├── expected-analytic-read-data-response.json │ 
└── uaa-mock.json ├── orchestrations ├── OrchestrationWithOneAnalytic.xml ├── OrchestrationWithThirdPartyAnalytic.xml ├── OrchestrationWithTwoAnalytics.xml ├── README.md ├── demoAdderMultiStepOrchestration │ ├── Demo_adder_Multistep_Orchestration.postman_collection │ ├── InitialData │ │ ├── rawTimeSeriesData_KW.json │ │ └── rawTimeSeriesData_vibration.json │ ├── Orchestration │ │ ├── TwoStepOrchestration.bpmn20.xml │ │ ├── step1-portToFieldMap.json │ │ ├── step2-portToFieldMap.json │ │ └── thresholdModel.json │ └── README.md ├── multiStepOrchestration │ ├── InitialData │ │ ├── README.md │ │ ├── asset-model.json │ │ ├── asset32-CDM-timeseries-data.json │ │ ├── asset32-bearing-temperature-timeseries-data.json │ │ ├── asset32-kw-timeseries-data.json │ │ ├── asset32-vibration-timeseries-data.json │ │ ├── asset32-vibration1-timeseries-data.json │ │ ├── asset37-CDM-timeseries-data.json │ │ ├── asset37-bearing-temperature-timeseries-data.json │ │ ├── asset37-kw-timeseries-data.json │ │ ├── asset37-vibration-timeseries-data.json │ │ ├── asset37-vibration1-timeseries-data.json │ │ ├── asset38-CDM-timeseries-data.json │ │ ├── asset38-bearing-temperature-timeseries-data.json │ │ ├── asset38-kw-timeseries-data.json │ │ ├── asset38-vibration-timeseries-data.json │ │ ├── asset38-vibration1-timeseries-data.json │ │ ├── assetGroupQuery.txt │ │ └── tagMapQuery.txt │ ├── Orchestration │ │ ├── Asset32CDMModel.txt │ │ ├── DemoAdderPortToFieldMap.json │ │ ├── MinersRuleOrchestration.bpmn20.xml │ │ ├── MinersRulePortToFieldMap.json │ │ ├── README.md │ │ └── TurbineCDMModel.txt │ ├── README.md │ └── TwoStepOrchestrationDemo.postman_collection.json └── oneStepOrchestration │ ├── README.md │ ├── SingleStepOrchestrationDemoUsingTagMap.postman_collection.json │ ├── orchestrationConfigurationFiles │ ├── orchestration-workflow.xml │ └── port-to-field-map-for-demoTimeseriesAdder.json │ └── supportingDataFiles │ ├── analytic-input-for-demo-timeseries-adder.json │ ├── 
tag-A-time-bounded-request.json │ ├── tag-B-time-bounded-request.json │ ├── time-series-tag-A-data.json │ └── time-series-tag-B-data.json ├── postman ├── Analytics_Framework.postman_collection.json ├── README.md ├── Template.postman_environment.json ├── UAA_Token_for_Analytics_Services.json.postman_collection ├── archive │ ├── Analytics_Catalog.json.postman_collection │ ├── Analytics_Runtime.json.postman_collection │ └── Template.postman_environment └── images │ ├── PostmanEnvironmentsDropdown.png │ ├── PostmanEnvironmentsPopup.png │ ├── PredixAnalytics.png │ ├── postman-logo.png │ └── separator.png └── user-provisioning ├── README.md └── analytics-ui-user-add.sh /.gitignore: -------------------------------------------------------------------------------- 1 | # generated files 2 | .idea 3 | *.iml 4 | **/target 5 | *.log 6 | .classpath 7 | .project 8 | **/org.eclipse.jdt.core.prefs 9 | **/org.eclipse.core.resources.prefs 10 | **/org.eclipse.m2e.core.prefs 11 | javabuilder.jar -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Predix Analytics Samples 2 | 3 | A collection of samples for use with [Predix Analytics](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/) services. 4 | 5 | ## Sample Analytics 6 | 7 | These are sample analytics written for use with Predix Analytics: 8 | 9 | - **[demo-adder](analytics/demo-adder):** A simple analytic that takes 2 numbers as input and returns their sum. It has been implemented in Java, Matlab (r2011b), and Python. 10 | - **[demo-timeseries-adder](analytics/demo-timeseries-adder-java):** Takes 2 arrays of timeseries data and returns a timeseries array that contains the sums at each timestamp. Currently available in Java. 
11 | - **[demo-timeseries-adder-with-model](analytics/demo-timeseries-adder-with-model):** Takes 2 arrays of timeseries data and returns a timeseries array that contains the sums at each timestamp, adjusted by a threshold value provided in a trained model. Currently available in Java and Python. 12 | - **[demo-RTM-loco](analytics/demo-RTM-loco):** A reference analytic that is used to calculate locomotive efficiency using a linear regression model. It has been implemented in Java, Matlab (r2011b), and Python. 13 | - **[miners-rule](analytics/miners-rule):** A sample analytic that performs a Miner's Rule operation on 2 timeseries arrays and returns a timeseries array. Currently only available in Java. 14 | - **[simple-linear-regression](analytics/simple-linear-regression):**:new: A sample analytic that performs a simple linear regression with two arrays as input and returns the p-value, r-value, slope, intercept and standard error. Currently only available in Python 3. 15 | 16 | For more information on developing analytics for use with Predix Analytics, see [Analytic Development](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/analytic-development) on Predix IO. 17 | 18 | ## Sample Orchestration Workflows 19 | 20 | **[Sample Orchestration Workflows](orchestrations)** 21 | 22 | You can find more information on [configuring](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/orchestration-configuration) and [running](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/orchestration-execution) orchestrations on Predix IO. 23 | 24 | ## Postman Collections 25 | 26 | **[Sample Postman Collections](postman)** 27 | 28 | Once you have created your instance of either the Analytics Catalog or Analytics Runtime service, you can use the sample Postman collections to customize your REST requests and test them out to aid in implementing your applications. 
29 | 30 | ## Custom Data Connector 31 | 32 | **[Sample Custom Data Connectors](custom-data-connector)** 33 | 34 | These are sample custom data connector implementations to connect to various data sources 35 | -------------------------------------------------------------------------------- /analytics/README.md: -------------------------------------------------------------------------------- 1 | # Sample Analytics 2 | 3 | These are sample analytics written for use with [Predix Analytics](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/): 4 | 5 | - **[demo-adder](demo-adder):** A simple analytic that takes 2 numbers as input and returns their sum. It has been implemented in Java, Matlab (r2011b), and Python. 6 | - **[demo-timeseries-adder-java](demo-timeseries-adder-java):** Takes 2 arrays of timeseries data and returns a timeseries array that contains the sums at each timestamp. Currently available in Java. 7 | - **[demo-timeseries-adder-with-model](demo-timeseries-adder-with-model):** Takes 2 arrays of timeseries data and returns a timeseries array that contains the sums at each timestamp, adjusted by a threshold value provided in a trained model. Currently available in Java and Python. 8 | - **[demo-RTM-loco](demo-RTM-loco):** A reference analytic that is used to calculate locomotive efficiency using a linear regression model. It has been implemented in Java, Matlab (r2011b), and Python. 9 | - **[miners-rule](miners-rule):** A sample analytic that performs a Miner's Rule operation on 2 timeseries arrays and returns a timeseries array. Currently only available in Java. 10 | - **[simple-linear-regression](simple-linear-regression):**:new: A sample analytic that performs a simple linear regression with two arrays as input and returns the p-value, r-value, slope, intercept and standard error. Currently only available in Python 3. 
11 | 12 | For more information on developing analytics for use with Predix Analytics, see [Analytic Development](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/analytic-development) on Predix IO. 13 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-java/src/main/java/com/ge/predix/analytics/customdto/LocoRegressionRequest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | public class LocoRegressionRequest { 14 | 15 | protected WindLocoRTMValues test; 16 | protected WindLocoRTMValues train; 17 | 18 | public WindLocoRTMValues getTest() { 19 | return test; 20 | } 21 | 22 | public void setTest(WindLocoRTMValues test) { 23 | this.test = test; 24 | } 25 | 26 | public WindLocoRTMValues getTrain() { 27 | return train; 28 | } 29 | 30 | public void setTrain(WindLocoRTMValues train) { 31 | this.train = train; 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-java/src/main/java/com/ge/predix/analytics/customdto/LocoRegressionResponse.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. 
The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | import java.util.Arrays; 14 | 15 | import com.fasterxml.jackson.annotation.JsonProperty; 16 | 17 | public class LocoRegressionResponse { 18 | 19 | @JsonProperty(value = "R2") 20 | protected double[] r2; 21 | 22 | @JsonProperty(value = "Prediction") 23 | protected double[] prediction; 24 | 25 | @Override public boolean equals(Object o) { 26 | if (this == o) 27 | return true; 28 | if (o == null || getClass() != o.getClass()) 29 | return false; 30 | 31 | LocoRegressionResponse that = (LocoRegressionResponse) o; 32 | 33 | if (!Arrays.equals(r2, that.r2)) 34 | return false; 35 | return Arrays.equals(prediction, that.prediction); 36 | 37 | } 38 | 39 | @Override public int hashCode() { 40 | int result = Arrays.hashCode(r2); 41 | result = 31 * result + Arrays.hashCode(prediction); 42 | return result; 43 | } 44 | 45 | @Override public String toString() { 46 | return "LocoRegressionResponse{" + 47 | "r2=" + Arrays.toString(r2) + 48 | ", prediction=" + Arrays.toString(prediction) + 49 | '}'; 50 | } 51 | 52 | public double[] getR2() { 53 | return r2; 54 | } 55 | 56 | public void setR2(double[] r2) { 57 | this.r2 = r2; 58 | } 59 | 60 | public double[] getPrediction() { 61 | return prediction; 62 | } 63 | 64 | public void setPrediction(double[] prediction) { 65 | this.prediction = prediction; 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-java/src/main/java/com/ge/predix/analytics/customdto/WindLocoRTMValues.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 
3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | import com.fasterxml.jackson.annotation.JsonProperty; 14 | 15 | public class WindLocoRTMValues { 16 | private double[] wind_speed; 17 | private double[] rtm; 18 | private double[] loco_speed; 19 | 20 | public double[] getWind_speed() { 21 | return wind_speed; 22 | } 23 | 24 | public void setWind_speed(double[] wind_speed) { 25 | this.wind_speed = wind_speed; 26 | } 27 | 28 | @JsonProperty("RTM") 29 | public double[] getRTM() { 30 | return rtm; 31 | } 32 | 33 | public void setRTM(double[] rtm) { 34 | this.rtm = rtm; 35 | } 36 | 37 | public double[] getLoco_speed() { 38 | return loco_speed; 39 | } 40 | 41 | public void setLoco_speed(double[] loco_speed) { 42 | this.loco_speed = loco_speed; 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-java/src/main/resources/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "className": "com.ge.predix.analytics.demo.java.DemoRTMLocoJavaEntryPoint", 3 | "methodName": "RTM" 4 | } -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-java/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.appender.Stdout=org.apache.log4j.ConsoleAppender 2 | log4j.appender.Stdout.layout=org.apache.log4j.PatternLayout 3 | 4 | log4j.logger.org.springframework=DEBUG 5 | 6 | 7 | # Set root logger level to DEBUG and its only appender to CONSOLE. 
8 | log4j.rootLogger=DEBUG, FILE, CONSOLE 9 | 10 | #CONSOLE is set to be a ConsoleAppender. 11 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 12 | 13 | 14 | # CONSOLE uses PatternLayout. 15 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.CONSOLE.layout.ConversionPattern= %-5p %c %x - %m%n 17 | 18 | 19 | # FILE is file logger with rotation 20 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 21 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 22 | log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} %-5p %c %x - %m%n 23 | log4j.appender.FILE.File=pmp.log 24 | log4j.appender.FILE.MaxFileSize=10000KB 25 | # Keep three backup files 26 | log4j.appender.FILE.MaxBackupIndex=3 27 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-java/src/test/java/com/ge/predix/analytics/demo/java/DemoRTMLocoTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.demo.java; 12 | 13 | import java.io.IOException; 14 | import java.io.InputStream; 15 | 16 | import org.apache.commons.io.IOUtils; 17 | import org.junit.Assert; 18 | import org.junit.Test; 19 | 20 | public class DemoRTMLocoTest { 21 | @Test 22 | public void DemoRTMLocoTest() { 23 | DemoRTMLocoJavaEntryPoint drtm = new DemoRTMLocoJavaEntryPoint(); 24 | 25 | String jsonInput = null; 26 | try { 27 | jsonInput = getTestData(); 28 | } catch (IOException e) { 29 | e.printStackTrace(); 30 | Assert.fail(); 31 | } 32 | String result = drtm.RTM(jsonInput); 33 | //System.out.println(result); 34 | 35 | } 36 | 37 | private String getTestData() throws IOException { 38 | InputStream is = getClass().getClassLoader().getResourceAsStream("LocoData.json"); 39 | try { 40 | String fileContents = IOUtils.toString(is); 41 | return fileContents; 42 | } catch (IOException e) { 43 | e.printStackTrace(); 44 | Assert.fail(); 45 | } 46 | return null; 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-matlab-r2011b/data/locomotive/LocomotiveRegression.m: -------------------------------------------------------------------------------- 1 | function Out_Str=LocomotiveRegression(DataStr) 2 | % Parse the data and call the regression function 3 | data = JSON.parse(DataStr); 4 | 5 | 6 | % for educational purposes, contents of "data" have been casted into single 7 | % variables 8 | 9 | % training data 10 | loco_sp_tr = cell2mat(data.train.loco_speed(:)); 11 | loco_sp_sq_tr = cell2mat(data.train.loco_speed(:)).^2; 12 | wind_sp_tr = cell2mat(data.train.wind_speed(:)); 13 | loco_rtm_tr = cell2mat(data.train.RTM(:)); 14 | 15 | feature_columns_tr = [loco_sp_tr, wind_sp_tr, loco_sp_sq_tr, ones(size(loco_rtm_tr))]; 16 | label_column_tr = loco_rtm_tr; 17 | 18 | %linear regression parameters 19 | lr_params = pinv(feature_columns_tr' * feature_columns_tr)* ... 
20 | feature_columns_tr' * label_column_tr; 21 | %lm = fitlm(feature_columns_tr(:,1:3), label_column_tr) 22 | 23 | % calculate errors and statistical values for train data 24 | fitted_value = feature_columns_tr*lr_params; 25 | err_tr = loco_rtm_tr - fitted_value; 26 | distance = loco_rtm_tr - mean(loco_rtm_tr); 27 | R2 = 1 - sum(err_tr.^2)/sum(distance.^2); 28 | 29 | 30 | % test data 31 | loco_sp_tst = cell2mat(data.test.loco_speed(:)); 32 | loco_sp_sq_tst = cell2mat(data.test.loco_speed(:)).^2; 33 | wind_sp_tst = cell2mat(data.test.wind_speed(:)); 34 | loco_rtm_tst = cell2mat(data.test.RTM(:)); 35 | feature_columns_tst = [loco_sp_tst, wind_sp_tst, loco_sp_sq_tst, ones(size(loco_rtm_tst))]; 36 | 37 | %calculate linear model output using test data 38 | fitted_value_tst = feature_columns_tst*lr_params; 39 | 40 | 41 | %% Create JSON String 42 | Out_Str = ['{"R2": [' ]; %,'],"Prediction": [','%f',']}']; 43 | Out_Str = [Out_Str, num2str(R2)]; 44 | Out_Str = [Out_Str,'],"Prediction": [']; 45 | for ii = 1:length(fitted_value_tst) 46 | Out_Str = [Out_Str, num2str(fitted_value_tst(ii))]; 47 | Out_Str = [Out_Str,',']; 48 | end 49 | Out_Str = [Out_Str(1:end-1),']}']; 50 | end -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-matlab-r2011b/src/main/java/com/ge/predix/insight/analytic/demo/matlab/LocoRegDemo.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.insight.analytic.demo.matlab; 12 | 13 | import com.mathworks.toolbox.javabuilder.MWArray; 14 | import LocoRegression.*; 15 | 16 | public class LocoRegDemo { 17 | public String locoRegression(String jsonStr) { 18 | Object[] result = null; 19 | Class1 LocoReg = null; 20 | String resultStr = "error"; 21 | 22 | try { 23 | LocoReg = new Class1(); 24 | result = LocoReg.LocomotiveRegression(1, jsonStr); 25 | //System.out.println(result[0]); 26 | } catch (Exception e) { 27 | System.out.println("Exception: " + e.toString()); 28 | } finally { 29 | if (result != null) { 30 | if (result.length > 0) { 31 | resultStr = result[0].toString(); 32 | } 33 | } 34 | 35 | MWArray.disposeArray(result); 36 | LocoReg.dispose(); 37 | } 38 | return resultStr; 39 | } 40 | } 41 | 42 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-matlab-r2011b/src/main/resources/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "className":"com.ge.predix.insight.analytic.demo.matlab.LocoRegDemo", 3 | "methodName":"locoRegression", 4 | "matlabVersion":"r2011b" 5 | } 6 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-matlab-r2011b/src/main/resources/lib/LocoRegression.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PredixDev/predix-analytics-sample/2f99ae7335d12c05597041c2734fbdd454be1e57/analytics/demo-RTM-loco/demo-RTM-loco-matlab-r2011b/src/main/resources/lib/LocoRegression.jar -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-matlab-r2011b/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.appender.Stdout=org.apache.log4j.ConsoleAppender 2 | 
log4j.appender.Stdout.layout=org.apache.log4j.PatternLayout 3 | 4 | log4j.logger.org.springframework=DEBUG 5 | 6 | 7 | # Set root logger level to DEBUG and its only appender to CONSOLE. 8 | log4j.rootLogger=DEBUG, FILE, CONSOLE 9 | 10 | #CONSOLE is set to be a ConsoleAppender. 11 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 12 | 13 | 14 | # CONSOLE uses PatternLayout. 15 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.CONSOLE.layout.ConversionPattern= %-5p %c %x - %m%n 17 | 18 | 19 | # FILE is file logger with rotation 20 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 21 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 22 | log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} %-5p %c %x - %m%n 23 | log4j.appender.FILE.File=pmp.log 24 | log4j.appender.FILE.MaxFileSize=10000KB 25 | # Keep three backup files 26 | log4j.appender.FILE.MaxBackupIndex=3 27 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-matlab-r2011b/src/main/resources/messagesource/DemoAdderErrorMessages.properties: -------------------------------------------------------------------------------- 1 | ADDER001.SEVERITY=3 2 | ADDER001.MESSAGE=Exception performing Addition. 
3 | ADDER001.DETAIL=Exception performing Addition: inputs were: {0}, exception was: {1} 4 | 5 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-py/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "entry-method": "rtm.LocomotiveRegression.driver", 3 | "conda-libs": [ 4 | "numpy", 5 | "scikit-learn==0.20", 6 | "scipy==1.2.2", 7 | "pandas==0.24.2" 8 | ] 9 | } -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-py/rtm/RTMLocomotiveRegression.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import pandas as pd 4 | from sklearn.linear_model import LinearRegression 5 | 6 | 7 | class LocomotiveRegression: 8 | def __init__(self): 9 | print("Locomotive Linear Regression") 10 | 11 | def driver(self, data): 12 | data_json = json.loads(data) 13 | 14 | # extract training and test data 15 | training_df = pd.DataFrame(data_json["train"]) 16 | testing_df = pd.DataFrame(data_json["test"]) 17 | 18 | # get the predictions from the analytic 19 | predictions, score = self.train_and_predict(training_df, testing_df) 20 | 21 | # package results into a dictionary to pass them on to 22 | # analytic framework 23 | result = {"Prediction": predictions, "R2": score} 24 | 25 | return result 26 | 27 | def train_and_predict(self, training_df, testing_df): 28 | """ 29 | Train a linear regression model for Locomotive dataset and 30 | perform predictions on the test dataset. 
31 | 32 | Args: 33 | training_df: pandas.DataFrame with columns 34 | ['loco_speed', 'wind_speed', 'RTM'] 35 | testing_df: pandas.DataFrame with columns 36 | ['loco_speed', 'wind_speed'] 37 | 38 | Returns: 39 | """ 40 | 41 | # do feature transformation and 42 | # add locomotive speed squared as a non linear feature 43 | training_df["loco_speed_sqr"] = training_df["loco_speed"] ** 2 44 | testing_df["loco_speed_sqr"] = testing_df["loco_speed"] ** 2 45 | 46 | # dependent variables 47 | feature_columns = ['loco_speed', 'wind_speed', "loco_speed_sqr"] 48 | # target and prediction variable 49 | label_column = 'RTM' 50 | 51 | # transform data into numpy arrays to use with 52 | # scikit regression algorithm 53 | 54 | # training data 55 | train_feats = training_df[feature_columns].values 56 | train_targets = training_df[label_column].values 57 | 58 | # test data 59 | test_feats = testing_df[feature_columns].values 60 | test_targets = testing_df[label_column].values 61 | 62 | # train regression model and perform predictions 63 | lr = LinearRegression() 64 | model = lr.fit(train_feats, train_targets) 65 | 66 | # preform predictions for the test set using the fitted model 67 | # import pdb; pdb.set_trace() 68 | test_pred = model.predict(test_feats).tolist() 69 | logging.info("R^2: " + str(test_pred)) 70 | 71 | # prediction scores in terms of R^2 (unused) 72 | score = [model.score(test_feats, test_targets.T)] 73 | logging.info("Predictions: " + str(score)) 74 | 75 | # return predicted labels as a list 76 | return (test_pred, score) 77 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-py/rtm/__init__.py: -------------------------------------------------------------------------------- 1 | from rtm.RTMLocomotiveRegression import LocomotiveRegression 2 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/demo-RTM-loco-template.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "Demo RTM Analytic", 3 | "analyticVersion": "v1", 4 | "inputPortDefinitions": [ 5 | { 6 | "portType": "COMPOSITE", 7 | "portName": "test", 8 | "variable": false, 9 | "required": true, 10 | "childrenPorts": [ 11 | { 12 | "portName": "wind_speed", 13 | "portType": "FIELD", 14 | "variable": false, 15 | "required": true, 16 | "dataType": "INTEGER_ARRAY" 17 | }, 18 | { 19 | "portName": "RTM", 20 | "portType": "FIELD", 21 | "variable": false, 22 | "required": true, 23 | "dataType": "DOUBLE_ARRAY" 24 | }, 25 | { 26 | "portName": "loco_speed", 27 | "portType": "FIELD", 28 | "variable": false, 29 | "required": true, 30 | "dataType": "DOUBLE_ARRAY" 31 | } 32 | ] 33 | }, 34 | { 35 | "portType": "COMPOSITE", 36 | "portName": "train", 37 | "variable": false, 38 | "required": true, 39 | "childrenPorts": [ 40 | { 41 | "portName": "wind_speed", 42 | "portType": "FIELD", 43 | "variable": false, 44 | "required": true, 45 | "dataType": "INTEGER_ARRAY" 46 | }, 47 | { 48 | "portName": "RTM", 49 | "portType": "FIELD", 50 | "variable": false, 51 | "required": true, 52 | "dataType": "DOUBLE_ARRAY" 53 | }, 54 | { 55 | "portName": "loco_speed", 56 | "portType": "FIELD", 57 | "variable": false, 58 | "required": true, 59 | "dataType": "DOUBLE_ARRAY" 60 | } 61 | ] 62 | } 63 | ], 64 | "outputPortDefinitions": [ 65 | { 66 | "portName": "Prediction", 67 | "portType": "FIELD", 68 | "variable": false, 69 | "required": true, 70 | "dataType": "DOUBLE_ARRAY" 71 | }, 72 | { 73 | "portName": "R2", 74 | "portType": "FIELD", 75 | "variable": false, 76 | "required": true, 77 | "dataType": "DOUBLE_ARRAY" 78 | } 79 | ] 80 | } -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/sampleInput-small.json: -------------------------------------------------------------------------------- 1 | { 2 | "test": { 3 | "wind_speed": [ 4 | 8, 5 | 15, 6 | 11, 7 | 6, 8 
| 24, 9 | 3, 10 | 21, 11 | 14, 12 | 25, 13 | 11 14 | ], 15 | "RTM": [ 16 | 369.11, 17 | 365.95, 18 | 365.87, 19 | 371.31, 20 | 365.69, 21 | 362.92, 22 | 364.29, 23 | 367.22, 24 | 363.9, 25 | 357.35 26 | ], 27 | "loco_speed": [ 28 | 44.1, 29 | 42.8, 30 | 40.06, 31 | 46.08, 32 | 47.23, 33 | 35.7, 34 | 51.36, 35 | 41.4, 36 | 44.65, 37 | 33.09 38 | ] 39 | }, 40 | "train": { 41 | "wind_speed": [ 42 | 17, 43 | 17, 44 | 22, 45 | 19, 46 | 11, 47 | 19, 48 | 10, 49 | 17, 50 | 28, 51 | 14 52 | ], 53 | "RTM": [ 54 | 360.84, 55 | 363.66, 56 | 364.56, 57 | 363.84, 58 | 363.14, 59 | 360.46, 60 | 365.07, 61 | 366.93, 62 | 360.67, 63 | 361.6 64 | ], 65 | "loco_speed": [ 66 | 38.28, 67 | 41.31, 68 | 44.52, 69 | 40.93, 70 | 38.4, 71 | 38.34, 72 | 38.63, 73 | 45.11, 74 | 46.43, 75 | 37.46 76 | ] 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /analytics/demo-RTM-loco/sampleOutput-small.json: -------------------------------------------------------------------------------- 1 | { 2 | "R2": [ 3 | 0.69188 4 | ], 5 | "Prediction": [ 6 | 370.0659, 7 | 366.4111, 8 | 365.6598, 9 | 371.8394, 10 | 365.0185, 11 | 363.5657, 12 | 366.5063, 13 | 365.7035, 14 | 363.5706, 15 | 356.2802 16 | ] 17 | } -------------------------------------------------------------------------------- /analytics/demo-adder/README.md: -------------------------------------------------------------------------------- 1 | #demo-adder 2 | 3 | 3 implementations of a sample analytic for the Predix Analytics platform. 
4 | Specific details on how to package these analytics can be found within the implementation folders: 5 | 6 | - **[demo-adder-java](demo-adder-java):** A Java implementation of the demo-adder 7 | - **[demo-adder-matlab-2011b](demo-adder-matlab-r2011b):** A Matlab (r2011b) implementation of the demo-adder 8 | - **[demo-adder-py](demo-adder-py):** A Python implementation of the demo-adder 9 | 10 | ## Analytic template 11 | This analytic takes in 2 numbers and returns their sum. This structure is outlined in this [analytic template](demo-adder-template.json). 12 | 13 | ## Input format 14 | The expected JSON input data format is as follows: 15 | `{"number1": 123, "number2": 456}` 16 | 17 | ## Output format 18 | The JSON output format from the analytic is as follows: 19 | `{"result":579}` 20 | 21 | For more information on developing analytics for the Predix Analytics platform, see [Analytic Development](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/analytic-development) in the Predix Analytics Services documentation on Predix IO. -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-java/README.md: -------------------------------------------------------------------------------- 1 | #demo-adder-java 2 | 3 | A Java-based sample analytic for the Predix Analytics platform. 4 | 5 | ## Compiled binaries 6 | Refer to the [Releases](https://github.com/PredixDev/predix-analytics-sample/releases) page for compiled binaries you can upload directly to Predix Analytics. 7 | 8 | ## Pre-requisites 9 | To build and run this analytic, you will need to have the following: 10 | 11 | - JDK 1.7+ 12 | - Maven 3+ 13 | 14 | ## Building, deploying and running the analytic 15 | 1. 
From this directory, run the `mvn clean package` command to build and perform the component test, or just get the latest demo-adder-java-1.0.0.jar binary from the [Releases](https://github.com/PredixDev/predix-analytics-sample/releases) page. 16 | 2. Create an analytic in the Analytics Catalog with the name "Demo Adder Java" and the version "v1". 17 | 3. Upload the jar file demo-adder-java-1.0.0.jar from the demo-adder-java/target directory and attach it to the created analytic entry. 18 | 4. Deploy and test the analytic on Predix Analytics platform. 19 | 20 | ## Analytic template 21 | This analytic takes in 2 numbers and returns their sum. This structure is outlined in this [analytic template](../demo-adder-template.json). 22 | 23 | ## Input format 24 | The expected JSON input data format is as follows: 25 | `{"number1": 123, "number2": 456}` 26 | 27 | ## Output format 28 | The JSON output format from the analytic is as follows: 29 | `{"result":579}` 30 | 31 | ## Developing a java-based analytic 32 | 1. Implement the analytic (and test functions) according to your development guidelines. 33 | 2. Create an entry-point method. The entry method signature must be in one of the following two formats: 34 | * For analytics that do not use trained models, use the following signature for your entry method: 35 | `public String entry_method(String inputJson)` 36 | * For analytics that use trained models, use the following signature for your entry method: 37 | `public String entry_method(String inputJson, Map inputModels)` 38 | * In either case, the `entry_method` can be any method name. `inputJson` is the JSON string input that will be passed to the analytic. The output of this method must also be a JSON string. 39 | * `inputModels` contains a map of trained models as defined in the port-to-field map. The entry method should properly handle the case of an empty map. 40 | 3. 
Create the JSON configuration file `src/main/resources/config.json` containing the className and MethodName definitions that instruct the generated wrapper code to call your designated entry point method with the request payload. 41 | 4. Build and prepare the analytic jar file including `config.json` file and dependent jar files. See [sample pom.xml](pom.xml) for reference. 42 | 43 | In this example, the entry-point is `add2Numbers` in the [DemoAdderJavaEntryPoint](src/main/java/com/ge/predix/analytics/demo/java/DemoAdderJavaEntryPoint.java) class. 44 | [config.json](src/main/resources/config.json) properly maps the entry point to the `add2Numbers` method of the `DemoAdderJavaEntryPoint` class. 45 | This method takes in a JSON String, maps it to a HashMap (see line 19), performs the computation, and returns the result as a new JSON String (see line 27). 46 | 47 | ## Deploying the analytic to the Predix Cloud 48 | When you upload the jar file as an 'Executable' artifact the platform wraps the executable as a web service exposing the analytic via a URI derived from the analytic name. 49 | Requests made to this generated URI will be passed to the entry point method. 50 | 51 | 52 | 53 | For more information, see [Java Analytic Development](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/analytic-development#concept_188c43af-058f-48fa-81f4-367885ecb07a) in the Predix Analytics Services documentation on Predix IO. 
-------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-java/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 4.0.0 6 | com.ge.predix.insight 7 | demo-adder-java 8 | Demo adder analytic implementation 9 | jar 10 | Implementation of java demo analytic service 11 | 1.0.0 12 | 13 | 14 | 15 | 16 | org.slf4j 17 | slf4j-api 18 | 1.6.6 19 | provided 20 | 21 | 22 | com.fasterxml.jackson.core 23 | jackson-core 24 | 2.4.1 25 | provided 26 | 27 | 28 | com.fasterxml.jackson.core 29 | jackson-databind 30 | 2.4.1 31 | provided 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | org.apache.maven.plugins 40 | maven-compiler-plugin 41 | 42 | 1.7 43 | 1.7 44 | 45 | 46 | 47 | org.apache.maven.plugins 48 | maven-dependency-plugin 49 | 2.8 50 | 51 | 52 | copy-dependencies 53 | prepare-package 54 | 55 | copy-dependencies 56 | 57 | 58 | ${project.build.directory}/classes/lib 59 | false 60 | false 61 | true 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-java/src/main/java/com/ge/predix/analytics/customdto/AdderResponse.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | public class AdderResponse { 14 | 15 | protected Long result; 16 | 17 | @Override 18 | public boolean equals(Object o) { 19 | if (this == o) 20 | return true; 21 | if (o == null || getClass() != o.getClass()) 22 | return false; 23 | 24 | AdderResponse that = (AdderResponse) o; 25 | 26 | if (result != null ? !result.equals(that.result) : that.result != null) 27 | return false; 28 | 29 | return true; 30 | } 31 | 32 | @Override 33 | public int hashCode() { 34 | return result != null ? result.hashCode() : 0; 35 | } 36 | 37 | @Override public String toString() { 38 | return "AdderOutput{" + 39 | "result=" + result + 40 | 41 | '}'; 42 | } 43 | 44 | /** 45 | * Gets the value of the result property. 46 | * 47 | 48 | * @return 49 | * possible object is 50 | * {@link Long } 51 | * 52 | */ 53 | public Long getResult() { 54 | return result; 55 | } 56 | 57 | /** 58 | * Sets the value of the result property. 59 | * 60 | * @param value 61 | * allowed object is 62 | * {@link Long } 63 | * 64 | */ 65 | public void setResult(Long value) { 66 | this.result = value; 67 | } 68 | 69 | } 70 | -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-java/src/main/java/com/ge/predix/analytics/demo/java/DemoAdderJavaEntryPoint.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.demo.java; 12 | 13 | import java.io.IOException; 14 | import java.util.HashMap; 15 | 16 | import org.slf4j.Logger; 17 | import org.slf4j.LoggerFactory; 18 | 19 | import com.fasterxml.jackson.databind.ObjectMapper; 20 | import com.ge.predix.analytics.customdto.AdderResponse; 21 | 22 | public class DemoAdderJavaEntryPoint { 23 | 24 | Logger logger = LoggerFactory.getLogger(DemoAdderJavaEntryPoint.class); 25 | ObjectMapper mapper = new ObjectMapper(); 26 | 27 | public String add2Numbers(String jsonStr) throws IOException { 28 | 29 | HashMap jsonDataMap = mapper.readValue(jsonStr, HashMap.class); 30 | long number1 = jsonDataMap.get("number1"); 31 | long number2 = jsonDataMap.get("number2"); 32 | 33 | AdderResponse output = null; 34 | output = new AdderResponse(); 35 | output.setResult(number1 + number2); 36 | 37 | return mapper.writeValueAsString(output); 38 | 39 | } 40 | 41 | } -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-java/src/main/resources/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "className": "com.ge.predix.analytics.demo.java.DemoAdderJavaEntryPoint", 3 | "methodName": "add2Numbers" 4 | } -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-java/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.appender.Stdout=org.apache.log4j.ConsoleAppender 2 | log4j.appender.Stdout.layout=org.apache.log4j.PatternLayout 3 | 4 | log4j.logger.org.springframework=DEBUG 5 | 6 | 7 | # Set root logger level to DEBUG and its only appender to CONSOLE. 8 | log4j.rootLogger=DEBUG, FILE, CONSOLE 9 | 10 | #CONSOLE is set to be a ConsoleAppender. 11 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 12 | 13 | 14 | # CONSOLE uses PatternLayout. 
15 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.CONSOLE.layout.ConversionPattern= %-5p %c %x - %m%n 17 | 18 | 19 | # FILE is file logger with rotation 20 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 21 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 22 | log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} %-5p %c %x - %m%n 23 | log4j.appender.FILE.File=pmp.log 24 | log4j.appender.FILE.MaxFileSize=10000KB 25 | # Keep three backup files 26 | log4j.appender.FILE.MaxBackupIndex=3 27 | -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-matlab-r2011b/README.md: -------------------------------------------------------------------------------- 1 | #demo-adder-matlab (Matlab r2011b) 2 | 3 | A Matlab-based sample analytic for Predix Analytics. 4 | 5 | ## Pre-requisites 6 | To build and run this analytic, you will need to have the following: 7 | 8 | - Matlab 2011b with Java Builder 9 | - JDK 1.7+ 10 | - Maven 3+ 11 | 12 | ## Building, deploying and running the analytic 13 | 1. Obtain the javabuilder.jar file corresponding to Matlab version r2011b and place it in the libs directory (`src/main/resources/libs`). 14 | 2. From the demo-adder-matlab-r2011b directory, run the `mvn clean package` command to build the analytic jar file. 15 | 3. Create an analytic in Analytics Catalog with the name "Demo Adder Matlab" and the version "v1". 16 | 4. Upload the generated jar file from the demo-adder-matlab-r2011b/target directory and attach it to the created analytic entry. 17 | 5. Deploy and test the analytic on the Predix Analytics platform. 18 | 19 | 20 | ## Input format 21 | The expected JSON input data format is as follows: 22 | `{"number1": 123, "number2": 456}` 23 | 24 | 25 | ## Developing a Matlab-based analytic 26 | 1. Implement the Matlab analytic, preferably such that it takes in data as a JSON string. 27 | 2. 
Generate the Java JAR for the Matlab analytic using the instructions in the document Matlab Builder for Java found at [http://soliton.ae.gatech.edu/classes/ae6382/documents/matlab/mathworks/javabuilder.pdf](http://soliton.ae.gatech.edu/classes/ae6382/documents/matlab/mathworks/javabuilder.pdf). Note the package, class name, and method name entered. 28 | 3. Create a Java module that consumes your Matlab analytic as a library. If you are using Maven, this means including the analytic JAR file as a dependency in the `pom.xml` file. 29 | 4. Obtain the javabuilder.jar file corresponding to the Matlab version in which your analytic was developed and configure the Java module to consume it as a library. 30 | 5. Create a Java entry-point method which takes in the input data as a string, calls your Matlab algorithm, and returns the output as a string. If your Matlab method does not accept a JSON string as input, this Java entry point method will need to do the JSON parsing and call your Matlab method with the correctly formatted parameters. 31 | 6. Create the JSON configuration file `src/main/resources/config.json` containing the className, MethodName, and matlabVersion definitions that instruct the generated wrapper code to call your designated entry point method with the request payload. 32 | 7. Create a JAR package out of the Java module using either Maven (`mvn clean package`) or other tools. 33 | 34 | In this example, the POM file consumes the **(3)** Java JAR for the Matlab analytic (matlabadder.jar) and **(4)** the javabuilder.jar for Matlab r2011b (which you will need to provide), which need to be located in the libs directory (`src/main/resources/libs`). The entry-point is **(5)** `add2Numbers` in the [DemoAdderJavaEntryPoint](src/main/java/com/ge/predix/insight/analytic/demo/matlab/DemoMatlabAdderEntryPoint.java) class. 
35 | **(6)** [config.json](src/main/resources/config.json) properly maps the entry point to the `add2Numbers` method of the `DemoMatlabAdderEntryPoint` class. 36 | 37 | ## Deploying the analytic to the Predix Cloud 38 | When you upload the jar file **(7)** as an 'Executable' artifact the platform wraps the executable as a web service exposing the analytic via a URI derived from the analytic name. 39 | Requests made to this generated URI will be passed to the entry point method. 40 | 41 | 42 | For more information, see [Matlab Analytic Development](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/analytic-development#concept_6ffa1ff9-cbe1-447d-912d-1437d829311e) in the Predix Analytics Services documentation on Predix IO. -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-matlab-r2011b/src/main/java/com/ge/predix/insight/analytic/demo/matlab/DemoMatlabAdderEntryPoint.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.insight.analytic.demo.matlab; 12 | 13 | import java.util.HashMap; 14 | 15 | import org.slf4j.Logger; 16 | import org.slf4j.LoggerFactory; 17 | 18 | import com.fasterxml.jackson.databind.ObjectMapper; 19 | import Addprj.Class1; 20 | 21 | public class DemoMatlabAdderEntryPoint { 22 | 23 | Logger logger = LoggerFactory.getLogger(DemoMatlabAdderEntryPoint.class); 24 | 25 | public String add2Numbers(String jsonStr) { 26 | 27 | try { 28 | 29 | HashMap jsonDataMap = new ObjectMapper().readValue(jsonStr, HashMap.class); 30 | Double number1 = jsonDataMap.get("number1"); 31 | Double number2 = jsonDataMap.get("number2"); 32 | 33 | String[] inputStrArray = { number1.toString(), number2.toString() }; 34 | Class1.main(inputStrArray); 35 | return jsonStr; 36 | } catch (Exception e) { 37 | e.printStackTrace(); 38 | return null; 39 | } 40 | 41 | } 42 | 43 | } -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-matlab-r2011b/src/main/resources/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "className":"com.ge.predix.insight.analytic.demo.matlab.DemoMatlabAdderEntryPoint", 3 | "methodName":"add2Numbers", 4 | "matlabVersion":"r2011b" 5 | } 6 | -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-matlab-r2011b/src/main/resources/lib/matlabadder.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PredixDev/predix-analytics-sample/2f99ae7335d12c05597041c2734fbdd454be1e57/analytics/demo-adder/demo-adder-matlab-r2011b/src/main/resources/lib/matlabadder.jar -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-matlab-r2011b/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | 
log4j.appender.Stdout=org.apache.log4j.ConsoleAppender 2 | log4j.appender.Stdout.layout=org.apache.log4j.PatternLayout 3 | 4 | log4j.logger.org.springframework=DEBUG 5 | 6 | 7 | # Set root logger level to DEBUG and its only appender to CONSOLE. 8 | log4j.rootLogger=DEBUG, FILE, CONSOLE 9 | 10 | #CONSOLE is set to be a ConsoleAppender. 11 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 12 | 13 | 14 | # CONSOLE uses PatternLayout. 15 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.CONSOLE.layout.ConversionPattern= %-5p %c %x - %m%n 17 | 18 | 19 | # FILE is file logger with rotation 20 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 21 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 22 | log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} %-5p %c %x - %m%n 23 | log4j.appender.FILE.File=pmp.log 24 | log4j.appender.FILE.MaxFileSize=10000KB 25 | # Keep three backup files 26 | log4j.appender.FILE.MaxBackupIndex=3 27 | -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-matlab-r2011b/src/main/resources/messagesource/DemoAdderErrorMessages.properties: -------------------------------------------------------------------------------- 1 | ADDER001.SEVERITY=3 2 | ADDER001.MESSAGE=Exception performing Addition. 3 | ADDER001.DETAIL=Exception performing Addition: inputs were: {0}, exception was: {1} 4 | 5 | -------------------------------------------------------------------------------- /analytics/demo-adder/demo-adder-py/README.md: -------------------------------------------------------------------------------- 1 | #demo-adder-py 2 | 3 | A python-based sample analytic for Predix Analytics. 4 | 5 | ## Compiled binaries 6 | Refer to the [Releases](https://github.com/PredixDev/predix-analytics-sample/releases) page for compiled binaries you can upload directly to Predix Analytics. 
7 | 8 | ## Pre-requisites 9 | To run this analytic locally, you will need to have the following: 10 | - Python 2.7+ 11 | - Flask 0.10+ 12 | 13 | ## Building, deploying and running the analytic 14 | 1. Zip the contents of this directory, or just get the latest demo-adder-py binary from the [Releases](https://github.com/PredixDev/predix-analytics-sample/releases) page. 15 | 2. Create an analytic in Analytics Catalog with the name "demo-adder-py" and the version "v1". 16 | 3. Upload the zip file and attach it to the created analytic. 17 | 4. Deploy and test the analytic on Predix Analytics platform. 18 | 19 | ## Analytic template 20 | This analytic takes in 2 numbers and returns their sum. This structure is outlined in this [analytic template](../demo-adder-template.json). 21 | 22 | ## Input format 23 | The expected JSON input data format is as follows: 24 | `{"number1": 123, "number2": 456}` 25 | 26 | ## Output format 27 | The JSON output format from the analytic is as follows: 28 | `{"result":579}` 29 | 30 | ## Developing a Python-based analytic 31 | 1. Implement the analytic (and test functions) according to your development guidelines. 32 | 2. Create an entry method in your analytic class. The entry method signature must be in one of the following two formats: 33 | * For analytics that do not use trained models, use the following signature for your entry method: 34 | `def entry_method(self, inputJson):` 35 | * For analytics that use trained models, use the following signature for your entry method: 36 | `def entry_method(self, inputJson, inputModels):` 37 | * In either case, the `entry_method` can be any method name. `inputJson` is the JSON string input that will be passed to the analytic. The output of this method must also be a JSON string. 38 | * `inputModels` contains a dict() of trained models as defined in the port-to-field map. The entry method should properly handle the case of an empty dict. 39 | 3. 
from analytic import demoAdder

if __name__ == "__main__":
    dm = demoAdder()
    # demoAdder.add takes a single JSON string and returns a dict (see
    # analytic/demoAdder.py). The previous call, add(34, 45) == 79, raised
    # a TypeError (json.loads on an int) and compared against the wrong
    # result shape.
    assert dm.add('{"number1": 34, "number2": 45}') == {"result": 79}
"variable": false, 47 | "columns": [ 48 | { 49 | "portName": "sum", 50 | "portType": "FIELD", 51 | "variable": false, 52 | "dataType": "DOUBLE_ARRAY", 53 | "required": true 54 | } 55 | ] 56 | } 57 | ] 58 | } 59 | ] 60 | } 61 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 4.0.0 7 | com.ge.predix.insight 8 | demo-timeseries-adder-java 9 | Demo adder analytic with timeseries implementation in Java 10 | jar 11 | Implementation of demo analytic service with timeseries data 12 | 1.0.0 13 | 14 | 15 | 16 | org.slf4j 17 | slf4j-api 18 | 1.6.6 19 | provided 20 | 21 | 22 | com.fasterxml.jackson.core 23 | jackson-core 24 | 2.4.1 25 | provided 26 | 27 | 28 | com.fasterxml.jackson.core 29 | jackson-databind 30 | 2.4.1 31 | provided 32 | 33 | 34 | commons-io 35 | commons-io 36 | 1.4 37 | 38 | 39 | junit 40 | junit 41 | 4.12 42 | test 43 | 44 | 45 | org.skyscreamer 46 | jsonassert 47 | 1.3.0 48 | test 49 | 50 | 51 | 52 | 53 | 54 | 55 | org.apache.maven.plugins 56 | maven-compiler-plugin 57 | 3.0 58 | 59 | 1.7 60 | 1.7 61 | 62 | 63 | 64 | org.apache.maven.plugins 65 | maven-dependency-plugin 66 | 2.8 67 | 68 | 69 | copy-dependencies 70 | prepare-package 71 | 72 | copy-dependencies 73 | 74 | 75 | ${project.build.directory}/classes/lib 76 | false 77 | false 78 | true 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/main/java/com/ge/predix/analytics/customdto/AdderResponse.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. 
The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | public class AdderResponse { 14 | 15 | protected Data data; 16 | 17 | public Data getData() { 18 | return data; 19 | } 20 | 21 | public void setData(Data data) { 22 | this.data = data; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/main/java/com/ge/predix/analytics/customdto/Data.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | public class Data { 14 | protected TimeseriesOutput time_series; 15 | 16 | public TimeseriesOutput getTime_series() { 17 | return time_series; 18 | } 19 | 20 | public void setTime_series(TimeseriesOutput time_series) { 21 | this.time_series = time_series; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/main/java/com/ge/predix/analytics/customdto/TimeseriesOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 
3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | import java.util.List; 14 | 15 | public class TimeseriesOutput { 16 | 17 | protected List sum; 18 | 19 | protected List time_stamp; 20 | 21 | public List getSum() { 22 | return sum; 23 | } 24 | 25 | public void setSum(List sum) { 26 | this.sum = sum; 27 | } 28 | 29 | public List getTime_stamp() { 30 | return time_stamp; 31 | } 32 | 33 | public void setTime_stamp(List time_stamp) { 34 | this.time_stamp = time_stamp; 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/main/java/com/ge/predix/analytics/demo/java/DemoJavaAdderWithTimeseriesEntryPoint.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.demo.java; 12 | 13 | import java.io.IOException; 14 | import java.util.ArrayList; 15 | import java.util.List; 16 | 17 | import org.slf4j.Logger; 18 | import org.slf4j.LoggerFactory; 19 | 20 | import com.fasterxml.jackson.databind.JsonNode; 21 | import com.fasterxml.jackson.databind.ObjectMapper; 22 | import com.fasterxml.jackson.databind.SerializationFeature; 23 | import com.fasterxml.jackson.databind.node.ArrayNode; 24 | import com.ge.predix.analytics.customdto.AdderResponse; 25 | import com.ge.predix.analytics.customdto.Data; 26 | import com.ge.predix.analytics.customdto.TimeseriesOutput; 27 | 28 | public class DemoJavaAdderWithTimeseriesEntryPoint { 29 | 30 | Logger logger = LoggerFactory.getLogger(DemoJavaAdderWithTimeseriesEntryPoint.class); 31 | ObjectMapper mapper = new ObjectMapper(); 32 | 33 | public String add2NumberArrays(String jsonStr) throws IOException { 34 | 35 | JsonNode node = mapper.readTree(jsonStr); 36 | JsonNode dataNode = node.get("data"); 37 | JsonNode timeseriesNode = dataNode.get("time_series"); 38 | JsonNode timestampNode = timeseriesNode.get("time_stamp"); 39 | List results = null; 40 | List timestampArray = null; 41 | if (timestampNode.isArray()) { 42 | ArrayNode timestamps = (ArrayNode) timestampNode; 43 | 44 | JsonNode number1Node = timeseriesNode.get("numberArray1"); 45 | ArrayNode number1Values = (ArrayNode) number1Node; 46 | 47 | JsonNode number2Node = timeseriesNode.get("numberArray2"); 48 | ArrayNode number2Values = (ArrayNode) number2Node; 49 | 50 | results = new ArrayList<>(); 51 | timestampArray = new ArrayList<>(); 52 | 53 | for (int i = 0; i < timestamps.size(); i++) { 54 | timestampArray.add(timestamps.get(i).asText()); 55 | results.add(number1Values.get(i).asDouble() + number2Values.get(i).asDouble()); 56 | } 57 | } 58 | 59 | AdderResponse outputResponse = new AdderResponse(); 60 | Data data = new Data(); 61 | TimeseriesOutput output = new TimeseriesOutput(); 62 | 
data.setTime_series(output); 63 | output.setTime_stamp(timestampArray); 64 | output.setSum(results); 65 | outputResponse.setData(data); 66 | 67 | mapper.configure(SerializationFeature.INDENT_OUTPUT, true); 68 | 69 | return mapper.writeValueAsString(outputResponse); 70 | 71 | } 72 | 73 | } -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/main/resources/analyticTemplate.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "java-timeseries-demo-adder", 3 | "analyticVersion": "1.0", 4 | "inputPortDefinitions": [ 5 | { 6 | "portType": "COMPOSITE", 7 | "portName": "data", 8 | "variable": false, 9 | "childrenPorts": [ 10 | { 11 | "portName": "time_series", 12 | "portType": "TIMESERIES_ARRAY", 13 | "required": true, 14 | "variable": false, 15 | "columns": [ 16 | { 17 | "portName": "numberArray1", 18 | "portType": "FIELD", 19 | "variable": false, 20 | "dataType": "DOUBLE_ARRAY", 21 | "required": true 22 | }, 23 | { 24 | "portName": "numberArray2", 25 | "portType": "FIELD", 26 | "variable": false, 27 | "dataType": "DOUBLE_ARRAY", 28 | "required": true 29 | } 30 | ] 31 | } 32 | ] 33 | } 34 | ], 35 | "outputPortDefinitions": [ 36 | { 37 | "portName": "data", 38 | "portType": "COMPOSITE", 39 | "required": true, 40 | "variable": false, 41 | "childrenPorts": [ 42 | { 43 | "portName": "time_series", 44 | "portType": "TIMESERIES_ARRAY", 45 | "required": true, 46 | "variable": false, 47 | "columns": [ 48 | { 49 | "portName": "sum", 50 | "portType": "FIELD", 51 | "variable": false, 52 | "dataType": "DOUBLE_ARRAY", 53 | "required": true 54 | } 55 | ] 56 | } 57 | ] 58 | } 59 | ] 60 | } 61 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/main/resources/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "className": 
"com.ge.predix.analytics.demo.java.DemoJavaAdderWithTimeseriesEntryPoint", 3 | "methodName": "add2NumberArrays" 4 | } 5 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.appender.Stdout=org.apache.log4j.ConsoleAppender 2 | log4j.appender.Stdout.layout=org.apache.log4j.PatternLayout 3 | log4j.logger.org.springframework=DEBUG 4 | # Set root logger level to DEBUG and its only appender to CONSOLE. 5 | log4j.rootLogger=DEBUG, FILE, CONSOLE 6 | #CONSOLE is set to be a ConsoleAppender. 7 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 8 | # CONSOLE uses PatternLayout. 9 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 10 | log4j.appender.CONSOLE.layout.ConversionPattern=%-5p %c %x - %m%n 11 | # FILE is file logger with rotation 12 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 13 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 14 | log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} %-5p %c %x - %m%n 15 | log4j.appender.FILE.File=java-timeseries-demo-adder.log 16 | log4j.appender.FILE.MaxFileSize=10000KB 17 | # Keep three backup files 18 | log4j.appender.FILE.MaxBackupIndex=3 19 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/test/java/com/ge/predix/analytics/demo/java/TestDemoJavaAdderWithTimeseries.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. 
The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.demo.java; 12 | 13 | import static org.junit.Assert.assertNotNull; 14 | 15 | import java.io.IOException; 16 | import java.io.InputStream; 17 | 18 | import org.apache.commons.io.IOUtils; 19 | import org.json.JSONException; 20 | import org.junit.Test; 21 | import org.skyscreamer.jsonassert.JSONAssert; 22 | 23 | public class TestDemoJavaAdderWithTimeseries { 24 | @Test 25 | public void testAdd2Numbers() throws IOException, JSONException { 26 | String inputDataString = getInputDataString(); 27 | DemoJavaAdderWithTimeseriesEntryPoint adder = new DemoJavaAdderWithTimeseriesEntryPoint(); 28 | 29 | String result = adder.add2NumberArrays(inputDataString); 30 | assertNotNull(result); 31 | JSONAssert.assertEquals(getExpectedResultString(), result, true); 32 | } 33 | 34 | private String getInputDataString() throws IOException { 35 | InputStream inputDataStream = getClass().getClassLoader().getResourceAsStream("analyticInputDataWithTimeseriesData.json"); 36 | return IOUtils.toString(inputDataStream); 37 | } 38 | 39 | private String getExpectedResultString() throws IOException { 40 | InputStream expectedOutputDataStream = getClass().getClassLoader().getResourceAsStream("analyticOutputDataWithTimeseriesData.json"); 41 | return IOUtils.toString(expectedOutputDataStream); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/test/resources/analyticInputDataWithTimeseriesData.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "time_series": { 4 | "numberArray1": [ 5 | 1.0, 6 | 2.0, 7 | 3.0, 8 | 4.0, 9 | 5.0, 10 | 6.0, 11 | 7.0, 12 | 8.0, 13 | 9.0, 
14 | 10.0 15 | ], 16 | "numberArray2": [ 17 | 100.0, 18 | 200.0, 19 | 300.0, 20 | 400.0, 21 | 500.0, 22 | 600.0, 23 | 700.0, 24 | 800.0, 25 | 900.0, 26 | 1000.0 27 | ], 28 | "time_stamp": [ 29 | "1455733669601", 30 | "1455733669602", 31 | "1455733669603", 32 | "1455733669604", 33 | "1455733669605", 34 | "1455733669606", 35 | "1455733669607", 36 | "1455733669608", 37 | "1455733669609", 38 | "1455733669610" 39 | ] 40 | } 41 | } 42 | } -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-java/src/test/resources/analyticOutputDataWithTimeseriesData.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "time_series": { 4 | "time_stamp": [ 5 | "1455733669601", 6 | "1455733669602", 7 | "1455733669603", 8 | "1455733669604", 9 | "1455733669605", 10 | "1455733669606", 11 | "1455733669607", 12 | "1455733669608", 13 | "1455733669609", 14 | "1455733669610" 15 | ], 16 | "sum": [ 17 | 101.0, 18 | 202.0, 19 | 303.0, 20 | 404.0, 21 | 505.0, 22 | 606.0, 23 | 707.0, 24 | 808.0, 25 | 909.0, 26 | 1010.0 27 | ] 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/README.md: -------------------------------------------------------------------------------- 1 | #demo-timeseries-adder-with-model 2 | 3 | 2 implementations of a sample analytic for the Predix Analytics platform that processes timeseries data and uses a trained model. 
4 | Specific details on how to package these analytics can be found within the implementation folders: 5 | 6 | - **[demo-timeseries-adder-with-model-java](demo-timeseries-adder-with-model-java):** A Java implementation of the demo-timeseries-adder-with-model 7 | - **[demo-timeseries-adder-with-model-py](demo-timeseries-adder-with-model-py):** A Python implementation of the demo-timeseries-adder-with-model 8 | 9 | ## Analytic template 10 | This analytic takes in 2 timeseries arrays and returns their sum, augmented by a "threshold" model. This structure is outlined in this [analytic template](demo-timeseries-adder-with-model-template.json). 11 | 12 | ## Input format 13 | The expected JSON input data format is as follows: 14 | ```json 15 | { 16 | "data": { 17 | "time_series": { 18 | "numberArray1": [ 19 | 1.0, 20 | 2.0, 21 | 3.0 22 | ], 23 | "numberArray2": [ 24 | 100.0, 25 | 200.0, 26 | 300.0 27 | ], 28 | "time_stamp": [ 29 | "1455733669601", 30 | "1455733669602", 31 | "1455733669603" 32 | ] 33 | } 34 | } 35 | } 36 | ``` 37 | 38 | ## Output format 39 | The JSON output format from the analytic is as follows: 40 | ```json 41 | { 42 | "data": { 43 | "time_series": { 44 | "time_stamp": [ 45 | "1455733669601", 46 | "1455733669602", 47 | "1455733669603" 48 | ], 49 | "sum": [ 50 | 101.0, 51 | 202.0, 52 | 303.0 53 | ] 54 | } 55 | } 56 | } 57 | ``` 58 | 59 | ## Model 60 | These analytics are written to accept a model with the key "threshold" that contains some numeric value. This is illustrated as the file [sampleModel.txt](sampleModel.txt). 61 | 62 | For more information on developing analytics for the Predix Analytics platform, see [Analytic Development](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/analytic-development) in the Predix Analytics Services documentation on Predix IO. 
-------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/main/java/com/ge/predix/analytics/customdto/AdderResponse.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | public class AdderResponse { 14 | 15 | protected Data data; 16 | 17 | public Data getData() { 18 | return data; 19 | } 20 | 21 | public void setData(Data data) { 22 | this.data = data; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/main/java/com/ge/predix/analytics/customdto/Data.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | public class Data { 14 | protected TimeseriesOutput time_series; 15 | 16 | public TimeseriesOutput getTime_series() { 17 | return time_series; 18 | } 19 | 20 | public void setTime_series(TimeseriesOutput time_series) { 21 | this.time_series = time_series; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/main/java/com/ge/predix/analytics/customdto/TimeseriesOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | import java.util.List; 14 | 15 | public class TimeseriesOutput { 16 | 17 | protected List sum; 18 | 19 | protected List time_stamp; 20 | 21 | public List getSum() { 22 | return sum; 23 | } 24 | 25 | public void setSum(List sum) { 26 | this.sum = sum; 27 | } 28 | 29 | public List getTime_stamp() { 30 | return time_stamp; 31 | } 32 | 33 | public void setTime_stamp(List time_stamp) { 34 | this.time_stamp = time_stamp; 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/main/resources/analyticTemplate.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "java-timeseries-demo-adder", 3 | "analyticVersion": "1.0", 4 | "inputPortDefinitions": [ 5 | { 6 | "portType": "COMPOSITE", 7 | "portName": "data", 8 | "variable": false, 9 | "childrenPorts": [ 10 | { 11 | "portName": "time_series", 12 | "portType": "TIMESERIES_ARRAY", 13 | "required": true, 14 | "variable": false, 15 | "columns": [ 16 | { 17 | "portName": "numberArray1", 18 | "portType": "FIELD", 19 | "variable": false, 20 | "dataType": "DOUBLE_ARRAY", 21 | "required": true 22 | }, 23 | { 24 | "portName": "numberArray2", 25 | "portType": "FIELD", 26 | "variable": false, 27 | "dataType": "DOUBLE_ARRAY", 28 | "required": true 29 | } 30 | ] 31 | } 32 | ] 33 | } 34 | ], 35 | "inputModelDefinitions": [ 36 | { 37 | "modelPortName": "threshold" 38 | } 39 | ], 40 | 41 | "outputPortDefinitions": [ 42 | { 43 | "portName": "data", 44 | "portType": "COMPOSITE", 45 | "required": true, 46 | "variable": false, 47 | "childrenPorts": [ 48 | { 49 | "portName": "time_series", 50 | "portType": "TIMESERIES_ARRAY", 51 | "required": true, 52 | "variable": false, 53 | "columns": [ 54 | { 55 | "portName": "sum", 56 | "portType": "FIELD", 57 | "variable": false, 58 | "dataType": 
"DOUBLE_ARRAY", 59 | "required": true 60 | } 61 | ] 62 | } 63 | ] 64 | } 65 | ] 66 | } 67 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/main/resources/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "className": "com.ge.predix.analytics.demo.java.DemoJavaAdderWithTimeseriesEntryPoint", 3 | "methodName": "add2NumberArrays" 4 | } 5 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.appender.Stdout=org.apache.log4j.ConsoleAppender 2 | log4j.appender.Stdout.layout=org.apache.log4j.PatternLayout 3 | log4j.logger.org.springframework=DEBUG 4 | # Set root logger level to DEBUG and its only appender to CONSOLE. 5 | log4j.rootLogger=DEBUG, FILE, CONSOLE 6 | #CONSOLE is set to be a ConsoleAppender. 7 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 8 | # CONSOLE uses PatternLayout. 
9 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 10 | log4j.appender.CONSOLE.layout.ConversionPattern=%-5p %c %x - %m%n 11 | # FILE is file logger with rotation 12 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 13 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 14 | log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} %-5p %c %x - %m%n 15 | log4j.appender.FILE.File=java-timeseries-demo-adder.log 16 | log4j.appender.FILE.MaxFileSize=10000KB 17 | # Keep three backup files 18 | log4j.appender.FILE.MaxBackupIndex=3 19 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/test/java/com/ge/predix/analytics/demo/java/TestDemoJavaAdderWithTimeseries.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.demo.java; 12 | 13 | import static org.junit.Assert.assertNotNull; 14 | 15 | import java.io.IOException; 16 | import java.io.InputStream; 17 | 18 | import org.apache.commons.io.IOUtils; 19 | import org.json.JSONException; 20 | import org.junit.Test; 21 | import org.skyscreamer.jsonassert.JSONAssert; 22 | 23 | public class TestDemoJavaAdderWithTimeseries { 24 | @Test 25 | public void testAdd2Numbers() throws IOException, JSONException { 26 | String inputDataString = getInputDataString(); 27 | DemoJavaAdderWithTimeseriesEntryPoint adder = new DemoJavaAdderWithTimeseriesEntryPoint(); 28 | 29 | String result = adder.add2NumberArrays(inputDataString); 30 | assertNotNull(result); 31 | JSONAssert.assertEquals(getExpectedResultString(), result, true); 32 | } 33 | 34 | private String getInputDataString() throws IOException { 35 | InputStream inputDataStream = getClass().getClassLoader().getResourceAsStream("analyticInputDataWithTimeseriesData.json"); 36 | return IOUtils.toString(inputDataStream); 37 | } 38 | 39 | private String getExpectedResultString() throws IOException { 40 | InputStream expectedOutputDataStream = getClass().getClassLoader().getResourceAsStream("analyticOutputDataWithTimeseriesData.json"); 41 | return IOUtils.toString(expectedOutputDataStream); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/test/resources/analyticInputDataWithTimeseriesData.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "time_series": { 4 | "numberArray1": [ 5 | 1.0, 6 | 2.0, 7 | 3.0, 8 | 4.0, 9 | 5.0, 10 | 6.0, 11 | 7.0, 12 | 8.0, 13 | 9.0, 14 | 10.0 15 | ], 16 | "numberArray2": [ 17 | 100.0, 18 | 200.0, 19 | 300.0, 20 | 400.0, 21 | 500.0, 22 | 600.0, 23 | 700.0, 24 | 800.0, 25 | 900.0, 26 | 1000.0 27 | ], 28 | "time_stamp": [ 29 | 
"1455733669601", 30 | "1455733669602", 31 | "1455733669603", 32 | "1455733669604", 33 | "1455733669605", 34 | "1455733669606", 35 | "1455733669607", 36 | "1455733669608", 37 | "1455733669609", 38 | "1455733669610" 39 | ] 40 | } 41 | } 42 | } -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/test/resources/analyticOutputDataWithTimeseriesData.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "time_series": { 4 | "time_stamp": [ 5 | "1455733669601", 6 | "1455733669602", 7 | "1455733669603", 8 | "1455733669604", 9 | "1455733669605", 10 | "1455733669606", 11 | "1455733669607", 12 | "1455733669608", 13 | "1455733669609", 14 | "1455733669610" 15 | ], 16 | "sum": [ 17 | 101.0, 18 | 202.0, 19 | 303.0, 20 | 404.0, 21 | 505.0, 22 | 606.0, 23 | 707.0, 24 | 808.0, 25 | 909.0, 26 | 1010.0 27 | ] 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-java/src/test/resources/sampleModel.txt: -------------------------------------------------------------------------------- 1 | 200 -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/README.md: -------------------------------------------------------------------------------- 1 | #demo-timeseries-adder-with-model-py 2 | 3 | A python-based sample analytic that adds two timeseries arrays using a trained model for the Predix Analytics platform. 4 | 5 | ## Compiled binaries 6 | Refer to the [Releases](https://github.com/PredixDev/predix-analytics-sample/releases) page for compiled binaries you can upload directly to Predix Analytics. 
7 | 8 | ## Pre-requisites 9 | To run this analytic locally, you will need to have the following: 10 | 11 | - Python 2.7+ 12 | - Flask 0.10+ 13 | 14 | ## Building, deploying and running the analytic 15 | 1. Zip the contents of this directory, or just get the latest demo-adder-py binary from the [Releases](https://github.com/PredixDev/predix-analytics-sample/releases) page. 16 | 2. Create an analytic in Analytics Catalog with the name "demo-timeseries-adder-with-model-py" and the version "v1". 17 | 3. Upload the zip file and attach it to the created analytic. 18 | 4. Deploy and test the analytic on Predix Analytics platform. 19 | 20 | ## Analytic template 21 | This analytic takes in 2 timeseries arrays and returns their sum, augmented by a "threshold" model. This structure is outlined in this [analytic template](../demo-timeseries-adder-with-model-template.json). 22 | 23 | ## Input format 24 | The expected JSON input data format is as follows: 25 | ```json 26 | { 27 | "data": { 28 | "time_series": { 29 | "numberArray1": [ 30 | 1.0, 31 | 2.0, 32 | 3.0 33 | ], 34 | "numberArray2": [ 35 | 100.0, 36 | 200.0, 37 | 300.0 38 | ], 39 | "time_stamp": [ 40 | "1455733669601", 41 | "1455733669602", 42 | "1455733669603" 43 | ] 44 | } 45 | } 46 | } 47 | ``` 48 | 49 | ## Output format 50 | The JSON output format from the analytic is as follows: 51 | ```json 52 | { 53 | "data": { 54 | "time_series": { 55 | "time_stamp": [ 56 | "1455733669601", 57 | "1455733669602", 58 | "1455733669603" 59 | ], 60 | "sum": [ 61 | 101.0, 62 | 202.0, 63 | 303.0 64 | ] 65 | } 66 | } 67 | } 68 | ``` 69 | 70 | ## Developing a Python-based analytic 71 | 1. Implement the analytic (and test functions) according to your development guidelines. 72 | 2. Create an entry method in your analytic class. 
The entry method signature must be in one of the following two formats: 73 | * For analytics that do not use trained models, use the following signature for your entry method: 74 | `def entry_method(self, inputJson):` 75 | * For analytics that use trained models, use the following signature for your entry method: 76 | `def entry_method(self, inputJson, inputModels):` 77 | * In either case, the `entry_method` can be any method name. `inputJson` is the JSON string input that will be passed to the analytic. The output of this method must also be a JSON string. 78 | * `inputModels` contains a dict() of trained models as defined in the port-to-field map. The entry method should properly handle the case of an empty dict. 79 | 3. Create a config.json file in the top level of the project directory. Specify the entry method in the format of `..`, conda-libs, and non-conda-libs. 80 | 4. Package all the analytic files and the config.json file into a ZIP file. 81 | 82 | For more information, see [Python Analytic Development](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/analytic-development#concept_9cbf93d9-d4f2-4b42-8695-4c3195f04a79) in the Predix Analytics Services documentation on Predix IO. 
83 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/analytics/__init__.py: -------------------------------------------------------------------------------- 1 | from analytics.demoTimeSeriesAdder import demoTimeSeriesAdder 2 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/analytics/demoTimeSeriesAdder.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | class demoTimeSeriesAdder: 4 | def __init__(self): 5 | print("Create python time series demo adder") 6 | 7 | def add2NumberArrays(self, data, modelmap = {}): 8 | threshold = float(modelmap['threshold']) if len(modelmap) > 0 else None 9 | data_json = json.loads(data) 10 | numberArray1 = data_json['data']['time_series']['numberArray1'] 11 | numberArray2 = data_json['data']['time_series']['numberArray2'] 12 | sum = [] 13 | for i in range(len(numberArray1)): 14 | result = numberArray1[i] + numberArray2[i] 15 | if (threshold is not None and result > threshold): 16 | sum.append(-1) 17 | else: 18 | sum.append(numberArray1[i] + numberArray2[i]) 19 | 20 | timestamps = data_json['data']['time_series']['time_stamp'] 21 | 22 | return json.dumps( \ 23 | { 24 | "data": 25 | { 26 | "time_series": 27 | { 28 | "time_stamp": timestamps, 29 | "sum": sum 30 | } 31 | } 32 | }) 33 | 34 | 35 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "entry-method": "analytics.demoTimeSeriesAdder.add2NumberArrays" 3 | } 4 | -------------------------------------------------------------------------------- 
/analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/testbench/data/inputData/analyticInput.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "time_series": { 4 | "numberArray1": [ 5 | 1.0, 6 | 2.0, 7 | 3.0, 8 | 4.0, 9 | 5.0, 10 | 6.0, 11 | 7.0, 12 | 8.0, 13 | 9.0, 14 | 10.0 15 | ], 16 | "numberArray2": [ 17 | 100.0, 18 | 200.0, 19 | 300.0, 20 | 400.0, 21 | 500.0, 22 | 600.0, 23 | 700.0, 24 | 800.0, 25 | 900.0, 26 | 1000.0 27 | ], 28 | "time_stamp": [ 29 | "1455733669601", 30 | "1455733669602", 31 | "1455733669603", 32 | "1455733669604", 33 | "1455733669605", 34 | "1455733669606", 35 | "1455733669607", 36 | "1455733669608", 37 | "1455733669609", 38 | "1455733669610" 39 | ] 40 | } 41 | } 42 | } -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/testbench/data/outputData/expectedAnalyticOutput.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "time_series": { 4 | "time_stamp": [ 5 | "1455733669601", 6 | "1455733669602", 7 | "1455733669603", 8 | "1455733669604", 9 | "1455733669605", 10 | "1455733669606", 11 | "1455733669607", 12 | "1455733669608", 13 | "1455733669609", 14 | "1455733669610" 15 | ], 16 | "sum": [ 17 | 101.0, 18 | 202.0, 19 | 303.0, 20 | 404.0, 21 | 505.0, 22 | 606.0, 23 | 707.0, 24 | 808.0, 25 | 909.0, 26 | 1010.0 27 | ] 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/testbench/data/outputData/expectedAnalyticOutputForThresholdOf200.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "time_series": { 4 | "time_stamp": [ 5 | "1455733669601", 6 | "1455733669602", 7 | "1455733669603", 8 | "1455733669604", 9 | 
"1455733669605", 10 | "1455733669606", 11 | "1455733669607", 12 | "1455733669608", 13 | "1455733669609", 14 | "1455733669610" 15 | ], 16 | "sum": [ 17 | 101.0, 18 | -1, 19 | -1, 20 | -1, 21 | -1, 22 | -1, 23 | -1, 24 | -1, 25 | -1, 26 | -1 27 | ] 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/testbench/data/sampleModel.txt: -------------------------------------------------------------------------------- 1 | 200 -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-py/testbench/demoTimeSeriesAdderUnitTestBench.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import unittest 4 | import json 5 | 6 | sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/..") 7 | from analytics import demoTimeSeriesAdder 8 | 9 | 10 | class demoTimeSeriesAdderUnitTestBench(unittest.TestCase): 11 | def test_givenTimeSeriesAnalytic_whenAnalyticIsInvokedWithoutModel_thenAnalyticOutputIsAsExpected(self): 12 | analytic = demoTimeSeriesAdder() 13 | with open('./data/inputData/analyticInput.json') as json_input: 14 | actual_output_data = analytic.add2NumberArrays(json_input.read()) 15 | with open('./data/outputData/expectedAnalyticOutput.json') as json_output: 16 | expected_output_data = json.loads(json_output.read()) 17 | 18 | self.assertEqual(expected_output_data, json.loads(actual_output_data)) 19 | 20 | 21 | def test_givenTimeSeriesAnalytic_whenAnalyticIsInvokedWithModel_thenAnalyticOutputIsAsExpected(self): 22 | analytic = demoTimeSeriesAdder() 23 | modelmap = { "threshold" : "200" } 24 | 25 | with open('./data/inputData/analyticInput.json') as json_input: 26 | actual_output_data = analytic.add2NumberArrays(json_input.read(), modelmap) 27 | with 
open('./data/outputData/expectedAnalyticOutputForThresholdOf200.json') as json_output: 28 | expected_output_data = json.loads(json_output.read()) 29 | 30 | self.assertEqual(expected_output_data, json.loads(actual_output_data)) 31 | 32 | 33 | if __name__ == "__main__": 34 | unittest.main() 35 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/demo-timeseries-adder-with-model-template.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "java-timeseries-demo-adder", 3 | "analyticVersion": "1.0", 4 | "inputPortDefinitions": [ 5 | { 6 | "portType": "COMPOSITE", 7 | "portName": "data", 8 | "variable": false, 9 | "childrenPorts": [ 10 | { 11 | "portName": "time_series", 12 | "portType": "TIMESERIES_ARRAY", 13 | "required": true, 14 | "variable": false, 15 | "columns": [ 16 | { 17 | "portName": "numberArray1", 18 | "portType": "FIELD", 19 | "variable": false, 20 | "dataType": "DOUBLE_ARRAY", 21 | "required": true 22 | }, 23 | { 24 | "portName": "numberArray2", 25 | "portType": "FIELD", 26 | "variable": false, 27 | "dataType": "DOUBLE_ARRAY", 28 | "required": true 29 | } 30 | ] 31 | } 32 | ] 33 | } 34 | ], 35 | "inputModelDefinitions": [ 36 | { 37 | "modelPortName": "threshold" 38 | } 39 | ], 40 | 41 | "outputPortDefinitions": [ 42 | { 43 | "portName": "data", 44 | "portType": "COMPOSITE", 45 | "required": true, 46 | "variable": false, 47 | "childrenPorts": [ 48 | { 49 | "portName": "time_series", 50 | "portType": "TIMESERIES_ARRAY", 51 | "required": true, 52 | "variable": false, 53 | "columns": [ 54 | { 55 | "portName": "sum", 56 | "portType": "FIELD", 57 | "variable": false, 58 | "dataType": "DOUBLE_ARRAY", 59 | "required": true 60 | } 61 | ] 62 | } 63 | ] 64 | } 65 | ] 66 | } 67 | -------------------------------------------------------------------------------- /analytics/demo-timeseries-adder-with-model/sampleModel.txt: 
-------------------------------------------------------------------------------- 1 | 200 -------------------------------------------------------------------------------- /analytics/miners-rule/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 4.0.0 6 | com.ge.predix.insight 7 | miners-rule 8 | Demo Miner's Rule implementation with stress limit model input 9 | jar 10 | Implementation of java miners-rule analytic service 11 | 2.0 12 | 13 | 14 | 15 | 16 | org.slf4j 17 | slf4j-api 18 | 1.6.6 19 | provided 20 | 21 | 22 | com.fasterxml.jackson.core 23 | jackson-core 24 | 2.4.1 25 | provided 26 | 27 | 28 | com.fasterxml.jackson.core 29 | jackson-databind 30 | 2.4.1 31 | provided 32 | 33 | 34 | junit 35 | junit 36 | 4.12 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | org.apache.maven.plugins 45 | maven-compiler-plugin 46 | 47 | 1.7 48 | 1.7 49 | 50 | 51 | 52 | org.apache.maven.plugins 53 | maven-dependency-plugin 54 | 2.8 55 | 56 | 57 | copy-dependencies 58 | prepare-package 59 | 60 | copy-dependencies 61 | 62 | 63 | ${project.build.directory}/classes/lib 64 | false 65 | false 66 | true 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/main/java/com/ge/predix/analytics/customdto/CDM_TimeSeries.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | package com.ge.predix.analytics.customdto; 11 | 12 | import java.util.List; 13 | 14 | public class CDM_TimeSeries { 15 | 16 | private List time_stamp; 17 | private List cdm_values; 18 | private List quality; 19 | 20 | public List getTime_stamp() { 21 | return time_stamp; 22 | } 23 | 24 | public void setTime_stamp(List time_stamp) { 25 | this.time_stamp = time_stamp; 26 | } 27 | 28 | public List getCdm_values() { 29 | return cdm_values; 30 | } 31 | 32 | public void setCdm_values(List cdm_values) { 33 | this.cdm_values = cdm_values; 34 | } 35 | 36 | public List getQuality() { 37 | return quality; 38 | } 39 | 40 | public void setQuality(List quality) { 41 | this.quality = quality; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/main/java/com/ge/predix/analytics/customdto/Input.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | package com.ge.predix.analytics.customdto; 11 | 12 | import java.util.ArrayList; 13 | import java.util.List; 14 | 15 | public class Input { 16 | 17 | private CDM_TimeSeries current_cdm; 18 | private StressesTimeSeries recentStresses; 19 | 20 | public CDM_TimeSeries getCurrent_cdm() { 21 | if (current_cdm == null) { 22 | current_cdm = new CDM_TimeSeries(); 23 | } 24 | return current_cdm; 25 | } 26 | 27 | public void setCurrent_cdm(CDM_TimeSeries current_cdm) { 28 | this.current_cdm = current_cdm; 29 | } 30 | 31 | public StressesTimeSeries getRecentStresses() { 32 | if (recentStresses == null) { 33 | recentStresses = new StressesTimeSeries(); 34 | } 35 | return recentStresses; 36 | } 37 | 38 | public void setRecentStresses(StressesTimeSeries recentStresses) { 39 | this.recentStresses = recentStresses; 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/main/java/com/ge/predix/analytics/customdto/Response.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.customdto; 12 | 13 | public class Response { 14 | 15 | protected CDM_TimeSeries updatedCDM; 16 | 17 | public CDM_TimeSeries getUpdatedCDM() { 18 | return updatedCDM; 19 | } 20 | 21 | public void setUpdatedCDM(CDM_TimeSeries updatedCDM) { 22 | this.updatedCDM = updatedCDM; 23 | } 24 | } 25 | 26 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/main/java/com/ge/predix/analytics/customdto/StressesTimeSeries.java: -------------------------------------------------------------------------------- 1 | package com.ge.predix.analytics.customdto; 2 | 3 | import java.util.ArrayList; 4 | import java.util.List; 5 | 6 | public class StressesTimeSeries { 7 | 8 | private List time_stamp; 9 | private List> stresses; 10 | private List quality; 11 | 12 | public List getTime_stamp() { 13 | if (this.time_stamp == null) { 14 | this.time_stamp = new ArrayList(); 15 | } 16 | return time_stamp; 17 | } 18 | 19 | public void setTime_stamp(List time_stamp) { 20 | this.time_stamp = time_stamp; 21 | } 22 | 23 | public List> getStresses() { 24 | if (this.stresses == null) { 25 | this.stresses = new ArrayList>(); 26 | } 27 | return stresses; 28 | } 29 | 30 | public void setStresses(List> stresses) { 31 | this.stresses = stresses; 32 | } 33 | 34 | public List getQuality() { 35 | return quality; 36 | } 37 | 38 | public void setQuality(List quality) { 39 | this.quality = quality; 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/main/java/com/ge/predix/analytics/customdto/TimeSeries.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. 
The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | package com.ge.predix.analytics.customdto; 11 | 12 | import java.util.ArrayList; 13 | import java.util.List; 14 | 15 | public class TimeSeries { 16 | 17 | private List time_stamp; 18 | private List> values; 19 | 20 | public List getTime_stamp() { 21 | if (time_stamp == null) { 22 | time_stamp = new ArrayList<>(); 23 | } 24 | return time_stamp; 25 | } 26 | 27 | public void setTime_stamp(List time_stamp) { 28 | this.time_stamp = time_stamp; 29 | } 30 | 31 | public List> getValues() { 32 | if (values == null) { 33 | values = new ArrayList<>(); 34 | } 35 | return values; 36 | } 37 | 38 | public void setValues(List> values) { 39 | this.values = values; 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/main/resources/analyticTemplate.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "MinersRule", 3 | "analyticVersion": "v1", 4 | "inputPortDefinitions": [ 5 | { 6 | "portName": "current_cdm", 7 | "portType": "TIMESERIES_ARRAY", 8 | "variable": false, 9 | "columns": [ 10 | { 11 | "portName": "cdm_values", 12 | "portType": "FIELD", 13 | "variable": false, 14 | "dataType": "DOUBLE_ARRAY" 15 | } 16 | ] 17 | }, 18 | { 19 | "portName": "recentStresses", 20 | "portType": "TIMESERIES_ARRAY", 21 | "variable": false, 22 | "columns": [ 23 | { 24 | "portName": "stresses", 25 | "portType": "FIELD", 26 | "variable": true, 27 | "dataType": "DOUBLE_ARRAY" 28 | } 29 | ] 30 | } 31 | ], 32 | "inputModelDefinitions" : [ 33 | { 34 | "comment": ["the order of the stress limits in this model must match the order of the stress values in the analytic input."], 35 | "modelPortName": "Stress Limits" 36 | } 37 
| ], 38 | "outputPortDefinitions": [ 39 | { 40 | "portName": "updatedCDM", 41 | "portType": "TIMESERIES_ARRAY", 42 | "variable": false, 43 | "columns": [ 44 | { 45 | "portName": "cdm_values", 46 | "portType": "FIELD", 47 | "variable": false, 48 | "dataType": "DOUBLE_ARRAY" 49 | } 50 | ] 51 | } 52 | ] 53 | } 54 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/main/resources/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "className": "com.ge.predix.analytics.demo.java.MinersRule", 3 | "methodName": "computeCDM" 4 | } -------------------------------------------------------------------------------- /analytics/miners-rule/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.appender.Stdout=org.apache.log4j.ConsoleAppender 2 | log4j.appender.Stdout.layout=org.apache.log4j.PatternLayout 3 | 4 | log4j.logger.org.springframework=DEBUG 5 | 6 | 7 | # Set root logger level to DEBUG and its only appender to CONSOLE. 8 | log4j.rootLogger=DEBUG, FILE, CONSOLE 9 | 10 | #CONSOLE is set to be a ConsoleAppender. 11 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 12 | 13 | 14 | # CONSOLE uses PatternLayout. 
15 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.CONSOLE.layout.ConversionPattern= %-5p %c %x - %m%n 17 | 18 | 19 | # FILE is file logger with rotation 20 | log4j.appender.FILE=org.apache.log4j.RollingFileAppender 21 | log4j.appender.FILE.layout=org.apache.log4j.PatternLayout 22 | log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} %-5p %c %x - %m%n 23 | log4j.appender.FILE.File=pmp.log 24 | log4j.appender.FILE.MaxFileSize=10000KB 25 | # Keep three backup files 26 | log4j.appender.FILE.MaxBackupIndex=3 27 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/test/resources/minersRuleIn.json: -------------------------------------------------------------------------------- 1 | { 2 | "current_cdm": { 3 | "time_stamp": [1,2,3,4], 4 | "cdm_values": [0.01, 0.03, 0.03, 0.045] 5 | }, 6 | "recentStresses" : { 7 | "time_stamp": [1, 2, 3, 4, 5, 6, 7, 8], 8 | "stresses": [ 9 | [1.0, 2.0, 0.0, 1.0, 2.0, 1.0, 3.0, 1.0], 10 | [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0] 11 | ] 12 | } 13 | } -------------------------------------------------------------------------------- /analytics/miners-rule/src/test/resources/minersRuleModel.txt: -------------------------------------------------------------------------------- 1 | 100.0, 200.0 -------------------------------------------------------------------------------- /analytics/miners-rule/src/test/resources/minersRuleOut.json: -------------------------------------------------------------------------------- 1 | {"updatedCDM":{"time_stamp":[1,2,3,4,8],"cdm_values":[0.01,0.03,0.03,0.045,0.16],"quality":null}} -------------------------------------------------------------------------------- /analytics/miners-rule/src/test/resources/sampleOrchestration/MinersRuleOrchestration.bpmn20.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 9 | 10 | 11 | 13 | sid-miners-rule-in 14 | 15 | 16 | 20 | 
sid-miners-rule-in 21 | sid-end-in 22 | 23 | 24 | 25 | sid-end-in 26 | 27 | 28 | 29 | 32 | 33 | 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/test/resources/sampleOrchestration/MinersRulePortToFieldMap.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "MinersRule", 3 | "analyticVersion": "v1", 4 | "iterations": [ 5 | { 6 | "inputMaps": [ 7 | { 8 | "valueSourceType": "DATA_CONNECTOR", 9 | "fullyQualifiedPortName": "current_cdm.cdm_values", 10 | "fieldId": "inputCDM", 11 | "queryCriteria": { 12 | "start": 0, 13 | "end": -1, 14 | "tags": [{ 15 | "limit": 10, 16 | "order": "asc" 17 | } 18 | ] 19 | }, 20 | "engUnit": null, 21 | "required": true, 22 | "dataSourceId": "PredixTimeSeries" 23 | }, 24 | { 25 | "valueSourceType": "DATA_CONNECTOR", 26 | "fullyQualifiedPortName": "recentStresses.stresses.0", 27 | "fieldId": "sensor1Stress", 28 | "queryCriteria": { 29 | "start": 0, 30 | "end": -1, 31 | "tags": [{ 32 | "limit": 10, 33 | "order": "asc" 34 | } 35 | ] 36 | }, 37 | "engUnit": null, 38 | "required": true, 39 | "dataSourceId": "PredixTimeSeries" 40 | }, 41 | { 42 | "valueSourceType": "DATA_CONNECTOR", 43 | "fullyQualifiedPortName": "recentStresses.stresses.1", 44 | "fieldId": "sensor2Stress", 45 | "queryCriteria": { 46 | "start": 0, 47 | "end": -1, 48 | "tags": [{ 49 | "limit": 10, 50 | "order": "asc" 51 | } 52 | ] 53 | }, 54 | "engUnit": null, 55 | "required": true, 56 | "dataSourceId": "PredixTimeSeries" 57 | }, 58 | { 59 | "valueSourceType": "DATA_CONNECTOR", 60 | "fullyQualifiedPortName": "recentStresses.stresses.2", 61 | "fieldId": "sensor3Stress", 62 | "queryCriteria": { 63 | "start": 0, 64 | "end": -1, 65 | "tags": [{ 66 | "limit": 10, 67 | "order": "asc" 68 | } 69 | ] 70 | }, 71 | "engUnit": null, 72 | "required": true, 73 | "dataSourceId": "PredixTimeSeries" 74 | } 75 | ], 76 | "outputMaps": [ 77 | { 78 | 
"fullyQualifiedPortName": "updatedCDM.cdm_values", 79 | "fieldId": "outputCDM", 80 | "dataSourceId": "PredixTimeSeries" 81 | } 82 | ] 83 | } 84 | ] 85 | } 86 | -------------------------------------------------------------------------------- /analytics/miners-rule/src/test/resources/sampleOrchestration/minersRuleModelForAsset1.txt: -------------------------------------------------------------------------------- 1 | 100.0, 200.0 -------------------------------------------------------------------------------- /analytics/miners-rule/src/test/resources/sampleOrchestration/minersRuleModelForAssetGroup1.txt: -------------------------------------------------------------------------------- 1 | 1000.0, 2000.0 -------------------------------------------------------------------------------- /analytics/simple-linear-regression/README.MD: -------------------------------------------------------------------------------- 1 | 2 | # Simple Linear Regression 3 | 4 | 5 | A Python3-based sample analytic performing simple linear regression computation which can be deployed to Predix Analytics platform. 6 | ## Pre-requisites 7 | 8 | To run this analytic locally, you will need to have the following: 9 | 10 | - Python 3.6+ 11 | - scipy 0.19.1 12 | - Maven 3 or later 13 | 14 | ## Running unit tests 15 | 16 | ```bash 17 | $ cd /simple-linear-regression/analytics 18 | $ python -m unittest test_simple_linear_regression.py 19 | 20 | ``` 21 | 22 | ## Building, deploying and running the analytic 23 | 24 | 1. Zip the contents of this directory or if you have Maven3 installed and configured, do a `mvn clean install` to generate the `py3-simple-linear-regression-0.1-bin.zip` in the `target` folder. 25 | 2. Create an analytic in Analytics Catalog with the name "simple-linregress", the version "v1" and the supported language to "PYTHON_3". 26 | 3. Upload the zip file and attach it to the created analytic. 27 | 4. Deploy and test the analytic on Predix Analytics platform. 
28 | 29 | ## Analytic template 30 | This analytic takes in 2 arrays and returns the p-value, r-value, slope, intercept and standard error. This structure is outlined in this [analytic template](simple_linear_regression_template.json). 31 | 32 | ### Input format 33 | 34 | The expected JSON input data format is as follows 35 | 36 | ```json 37 | { 38 | "y": [19.0, 20.3, 20.5, 21.5, 22.45, 23.0, 23.0, 25.5, 24.0], 39 | "x": [1, 2, 3, 4, 5, 6, 7, 8, 9] 40 | } 41 | ``` 42 | 43 | ### Output format 44 | The JSON output format from the analytic is as follows: 45 | 46 | ```json 47 | { 48 | "slope": 0.7166666666666667, 49 | "intercept": 18.472222222222225, 50 | "r_value": 0.9559490311973318, 51 | "p_value": 5.6576124923241295e-05, 52 | "std_err": 0.08317445171439007 53 | } 54 | ``` 55 | 56 | ## Developing a Python-based analytic 57 | 58 | 1. Implement the analytic (and test functions) according to your development guidelines. 59 | 2. Create an entry method in your analytic class. The entry method signature must be in one of the following two formats: 60 | * For analytics that do not use trained models, use the following signature for your entry method: 61 | `def entry_method(self, inputJson):` 62 | * For analytics that use trained models, use the following signature for your entry method: 63 | `def entry_method(self, inputJson, inputModels):` 64 | * In either case, the `entry_method` can be any method name. `inputJson` is the JSON string input that will be passed to the analytic. The output of this method must also be a JSON string. 65 | * `inputModels` contains a dict() of trained models as defined in the port-to-field map. The entry method should properly handle the case of an empty dict. 66 | 3. Create a config.json file in the top level of the project directory. Specify the entry method in the format of `..`, conda-libs, and non-conda-libs. 67 | 4. Package all the analytic files and the config.json file into a ZIP file. 
68 | 69 | For more information on developing analytics for use with the Predix Analytics platform, please visit the **[Analytic Development](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/analytic-development)** section of the Predix Analytics Services documentation on predix.io. 70 | 71 | 72 | -------------------------------------------------------------------------------- /analytics/simple-linear-regression/analytics/__init__.py: -------------------------------------------------------------------------------- 1 | from analytics.simple_linear_regression import SimpleLinearRegression 2 | -------------------------------------------------------------------------------- /analytics/simple-linear-regression/analytics/simple_linear_regression.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from scipy import stats 4 | 5 | 6 | class SimpleLinearRegression: 7 | 8 | def compute(self, data): 9 | data_json = json.loads(data) 10 | x = data_json.get("x") 11 | y = data_json.get("y") 12 | slope, intercept, r_value, p_value, std_err = stats.linregress(x, y) 13 | 14 | output = { 15 | "slope": slope, 16 | "intercept": intercept, 17 | "r_value": r_value, 18 | "p_value": p_value, 19 | "std_err": std_err} 20 | 21 | return json.dumps(output) 22 | -------------------------------------------------------------------------------- /analytics/simple-linear-regression/analytics/test_simple_linear_regression.py: -------------------------------------------------------------------------------- 1 | import json 2 | from unittest import TestCase 3 | 4 | from analytics import SimpleLinearRegression 5 | 6 | 7 | class SimpleLinearRegressionTest(TestCase): 8 | 9 | def setUp(self): 10 | pass 11 | 12 | def test_compute(self): 13 | input_data_dict = {"y": [19, 20, 20.5, 21.5, 22, 23, 23, 25.5, 24], 14 | "x": [1, 2, 3, 4, 5, 6, 7, 8, 9]} 15 | 16 | simple_linear_regression = SimpleLinearRegression() 17 | 
output_data = simple_linear_regression.compute(json.dumps(input_data_dict)) 18 | 19 | analytic_output_dict = json.loads(output_data) 20 | 21 | print(analytic_output_dict) 22 | 23 | self.assertEqual(0.7166666666666667, analytic_output_dict["slope"]) 24 | self.assertEqual(0.9559490311973318, analytic_output_dict["r_value"]) 25 | self.assertEqual(5.6576124923241295e-05, analytic_output_dict["p_value"]) 26 | self.assertEqual(18.472222222222225, analytic_output_dict["intercept"]) 27 | self.assertEqual(0.08317445171439007, analytic_output_dict["std_err"]) 28 | -------------------------------------------------------------------------------- /analytics/simple-linear-regression/assembly.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 12 | 13 | 14 | bin 15 | 16 | zip 17 | 18 | false 19 | 20 | 21 | ${project.basedir} 22 | 23 | pom.xml 24 | assembly.xml 25 | target/ 26 | .idea/ 27 | *.iml 28 | **/**.pyc 29 | **/**/test_*.py 30 | **/**/*_pycache_* 31 | README.MD 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /analytics/simple-linear-regression/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "entry-method": "analytics.SimpleLinearRegression.compute", 3 | "conda-libs": [ 4 | "scipy==0.19.1" 5 | ] 6 | } 7 | 8 | 9 | -------------------------------------------------------------------------------- /analytics/simple-linear-regression/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 11 | 12 | 14 | 15 | 4.0.0 16 | com.ge.predix.insight 17 | py3-simple-linear-regression 18 | Sample Simple Linear regression in Python 3 19 | pom 20 | Implementation of Simple Linear regression analytic service in Python 3 21 | 0.1 22 | 23 | 24 | 25 | 26 | org.apache.maven.plugins 27 | maven-assembly-plugin 28 | 2.5.5 29 | 30 | 31 | create-distribution 32 | package 33 | 34 | single 35 | 36 | 37 | 38 | 
assembly.xml 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /analytics/simple-linear-regression/simple_linear_regression_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "SimpleLinearRegression", 3 | "analyticVersion": "V1.0", 4 | "inputPortDefinitions": [ 5 | { 6 | "portType": "FIELD", 7 | "portName": "x", 8 | "variable": false, 9 | "dataType": "DOUBLE_ARRAY", 10 | "required": true 11 | }, 12 | { 13 | "portType": "FIELD", 14 | "portName": "y", 15 | "variable": false, 16 | "dataType": "INTEGER_ARRAY", 17 | "required": true 18 | } 19 | ], 20 | "outputPortDefinitions": [ 21 | { 22 | "portType": "FIELD", 23 | "portName": "slope", 24 | "variable": false, 25 | "dataType": "DOUBLE", 26 | "required": true 27 | }, 28 | { 29 | "portType": "FIELD", 30 | "portName": "intercept", 31 | "variable": false, 32 | "dataType": "DOUBLE", 33 | "required": true 34 | }, 35 | { 36 | "portType": "FIELD", 37 | "portName": "r_value", 38 | "variable": false, 39 | "dataType": "DOUBLE", 40 | "required": true 41 | }, 42 | { 43 | "portType": "FIELD", 44 | "portName": "p_value", 45 | "variable": false, 46 | "dataType": "DOUBLE", 47 | "required": true 48 | }, 49 | { 50 | "portType": "FIELD", 51 | "portName": "std_err", 52 | "variable": false, 53 | "dataType": "DOUBLE", 54 | "required": true 55 | } 56 | ] 57 | } -------------------------------------------------------------------------------- /custom-data-connector/README.md: -------------------------------------------------------------------------------- 1 | # Sample Custom Data Connectors 2 | 3 | These are sample custom data connector implementations to connect Predix Analytics to various data sources: 4 | 5 | - **[postgres db implementation](postgresdb-ref-impl-data-connector)** 6 | 7 | For more information on developing custom data connector implementations for use with Predix Analytics, 
see [About Analytics Using an External Data Source](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/data-sources#concept_0fa2cc54-b511-4524-8aa3-85da48a5acc9) on Predix IO. 8 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/manifest.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yml for custom data connector 3 | applications: 4 | - name: ${REF_POSTGRES_DATA_CONNECTOR_HOST} 5 | buildpack: https://github.com/cloudfoundry/java-buildpack.git#v3.7 6 | path: ${REF_POSTGRES_DATA_CONNECTOR_JAR} 7 | memory: 3G 8 | timeout: 180 9 | services: 10 | - ${REF_POSTGRES_DATA_CONNECTOR_RDPG} 11 | env: 12 | uaa_client_id: ${UAA_CLIENT_ID} 13 | uaa_client_secret: ${UAA_CLIENT_SECRET} 14 | uaa_check_token_url: ${UAA_CHECK_TOKEN_URL} 15 | SPRING_PROFILES_ACTIVE: cloud 16 | MEMORY_LIMIT: 2G 17 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/analytics/customconnector/refimpl/postgresdb/ApiV1AnalyticsCustomdataResource.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.customconnector.refimpl.postgresdb; 12 | 13 | import javax.ws.rs.Consumes; 14 | import javax.ws.rs.GET; 15 | import javax.ws.rs.POST; 16 | import javax.ws.rs.Path; 17 | import javax.ws.rs.Produces; 18 | import javax.ws.rs.core.Context; 19 | import javax.ws.rs.core.HttpHeaders; 20 | import javax.ws.rs.core.Response; 21 | import javax.ws.rs.core.UriInfo; 22 | 23 | import org.springframework.beans.factory.annotation.Autowired; 24 | import org.springframework.stereotype.Service; 25 | 26 | import com.ge.predix.insight.dto.customdataprovider.AnalyticReadDataRequest; 27 | import com.ge.predix.insight.dto.customdataprovider.AnalyticReadDataResponse; 28 | import com.ge.predix.insight.dto.customdataprovider.AnalyticWriteDataRequest; 29 | import com.ge.predix.insight.dto.customdataprovider.AnalyticWriteDataResponse; 30 | 31 | @Path("/api/v1/analytics/customdata") 32 | @Service 33 | public class ApiV1AnalyticsCustomdataResource { 34 | 35 | @Autowired 36 | private CustomDataResourceManager customDataResourceManager; 37 | 38 | @POST 39 | @Consumes("application/json") 40 | @Produces("application/json") 41 | @Path("/read") 42 | public Response postRead(AnalyticReadDataRequest analyticReadDataRequest, @Context HttpHeaders httpHeaders, @Context UriInfo uriInfo) { 43 | AnalyticReadDataResponse analyticReadDataResponse = customDataResourceManager.getAnalyticReadDataResponse(analyticReadDataRequest); 44 | return Response.ok().entity(analyticReadDataResponse).build(); 45 | } 46 | 47 | @POST 48 | @Consumes("application/json") 49 | @Produces("application/json") 50 | @Path("/write") 51 | public Response postWrite(AnalyticWriteDataRequest analyticWriteDataRequest, @Context HttpHeaders httpHeaders, @Context UriInfo uriInfo) { 52 | AnalyticWriteDataResponse analyticWriteDataResponse = customDataResourceManager.getAnalyticWriteDataResponse(analyticWriteDataRequest); 53 | return 
Response.status(Response.Status.CREATED).entity(analyticWriteDataResponse).build(); 54 | } 55 | 56 | @GET 57 | @Path("/healthcheck") 58 | public Response healthcheck(@Context HttpHeaders httpHeaders, @Context UriInfo uriInfo) { 59 | return Response.status(Response.Status.OK).build(); 60 | } 61 | } -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/analytics/customconnector/refimpl/postgresdb/DataSourceConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.customconnector.refimpl.postgresdb; 12 | 13 | import javax.sql.DataSource; 14 | 15 | import org.slf4j.Logger; 16 | import org.slf4j.LoggerFactory; 17 | import org.springframework.beans.factory.annotation.Autowired; 18 | import org.springframework.cloud.Cloud; 19 | import org.springframework.cloud.CloudFactory; 20 | import org.springframework.context.annotation.Bean; 21 | import org.springframework.context.annotation.Configuration; 22 | import org.springframework.context.annotation.Profile; 23 | import org.springframework.core.env.Environment; 24 | 25 | import com.jolbox.bonecp.BoneCPDataSource; 26 | 27 | class DataSourceConfig { 28 | 29 | private final static Logger LOGGER = LoggerFactory.getLogger(DataSourceConfig.class); 30 | 31 | @Configuration 32 | @Profile("cloud") 33 | static class CloudConfiguration { 34 | 35 | @Bean 36 | public Cloud cloud() { 37 | return new CloudFactory().getCloud(); 38 | } 39 | 40 | @Bean 41 | public DataSource dataSource() { 42 | return cloud().getSingletonServiceConnector(DataSource.class, null); 43 | } 44 | 45 | } 46 | 47 | @Configuration 48 | @Profile("default") 49 | static class LocalConfiguration { 50 | 51 | @Autowired 52 | private Environment environment; 53 | 54 | @Bean 55 | public DataSource dataSource() { 56 | BoneCPDataSource dataSource = new BoneCPDataSource(); 57 | setDataSourceProperties(dataSource, environment); 58 | LOGGER.info("Using BoneCP DataSource"); 59 | return dataSource; 60 | } 61 | 62 | } 63 | 64 | private static void setDataSourceProperties(BoneCPDataSource dataSource, Environment environment) { 65 | LOGGER.info("Initializing DataSourceProperties - default profile"); 66 | dataSource.setDriverClass(environment.getRequiredProperty("turbine.connector.db.driver")); 67 | dataSource.setJdbcUrl(environment.getRequiredProperty("turbine.connector.db.url")); 68 | dataSource.setUsername(environment.getRequiredProperty("turbine.connector.db.username")); 69 | 
dataSource.setPassword(environment.getRequiredProperty("turbine.connector.db.password")); 70 | LOGGER.info("BoneCP Configuration (default) : " + dataSource); 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/analytics/customconnector/refimpl/postgresdb/PostgresRefImplConnectorApplication.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customconnector.refimpl.postgresdb; 12 | 13 | import java.util.Collections; 14 | import javax.sql.DataSource; 15 | import javax.ws.rs.Path; 16 | import javax.ws.rs.ext.Provider; 17 | 18 | import org.apache.cxf.Bus; 19 | import org.apache.cxf.endpoint.Server; 20 | import org.apache.cxf.jaxrs.JAXRSServerFactoryBean; 21 | import org.codehaus.jackson.jaxrs.JacksonJsonProvider; 22 | import org.codehaus.jackson.map.DeserializationConfig; 23 | import org.codehaus.jackson.map.ObjectMapper; 24 | import org.codehaus.jackson.map.annotate.JsonSerialize; 25 | import org.springframework.beans.factory.annotation.Autowired; 26 | import org.springframework.boot.SpringApplication; 27 | import org.springframework.boot.autoconfigure.EnableAutoConfiguration; 28 | import org.springframework.boot.autoconfigure.SpringBootApplication; 29 | import org.springframework.context.ApplicationContext; 30 | import org.springframework.context.annotation.Bean; 31 | import 
org.springframework.context.annotation.ComponentScan; 32 | import org.springframework.context.annotation.Configuration; 33 | import org.springframework.context.annotation.ImportResource; 34 | import org.springframework.context.annotation.PropertySource; 35 | import org.springframework.jdbc.core.JdbcTemplate; 36 | 37 | @SpringBootApplication 38 | @ComponentScan(basePackages = "com.ge.predix.analytics") 39 | @Configuration 40 | @EnableAutoConfiguration 41 | @ImportResource(value = "classpath:META-INF/authentication-context.xml") 42 | //@EnableAutoConfiguration(exclude = SecurityAutoConfiguration.class) 43 | @PropertySource(value = "custom-connector-application.properties") 44 | public class PostgresRefImplConnectorApplication { 45 | 46 | @Autowired 47 | ApplicationContext applicationContext; 48 | 49 | @Autowired 50 | private Bus bus; 51 | 52 | public static void main(String[] args) { 53 | SpringApplication.run(PostgresRefImplConnectorApplication.class, args); 54 | } 55 | 56 | @Bean 57 | public Server rsServer() { 58 | JAXRSServerFactoryBean endpoint = new JAXRSServerFactoryBean(); 59 | endpoint.setBus(bus); 60 | endpoint.setServiceBeans(Collections.singletonList(applicationContext.getBeansWithAnnotation(Path.class).values())); 61 | endpoint.setProviders(Collections.singletonList(applicationContext.getBeansWithAnnotation(Provider.class).values())); 62 | endpoint.setAddress("/"); 63 | return endpoint.create(); 64 | } 65 | 66 | @Bean 67 | public JdbcTemplate jdbcTemplate(DataSource dataSource) { 68 | return new JdbcTemplate(dataSource); 69 | } 70 | 71 | @Bean 72 | public JacksonJsonProvider jacksonJsonProvider() { 73 | JacksonJsonProvider jacksonJsonProvider = new JacksonJsonProvider(); 74 | ObjectMapper mapper = new ObjectMapper(); 75 | mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL); 76 | mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false); 77 | jacksonJsonProvider.setMapper(mapper); 78 | return jacksonJsonProvider; 79 
| } 80 | } 81 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/analytics/customconnector/refimpl/postgresdb/SQLBuilder.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.customconnector.refimpl.postgresdb; 12 | 13 | import java.util.List; 14 | 15 | public interface SQLBuilder { 16 | 17 | public enum StatementType { 18 | SELECT, 19 | INSERT_OR_UPDATE 20 | } 21 | 22 | String buildSelectSql(); 23 | 24 | String[] buildInsertOrUpdateSql(); 25 | 26 | List getSelectColumns(); 27 | 28 | } 29 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/analytics/customconnector/refimpl/postgresdb/SQLBuilderForGenericSchema.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.customconnector.refimpl.postgresdb; 12 | 13 | import java.util.ArrayList; 14 | import java.util.Collection; 15 | import java.util.List; 16 | import java.util.Map; 17 | 18 | import org.apache.commons.lang3.StringUtils; 19 | 20 | import com.ge.predix.insight.dto.customdataprovider.Field; 21 | import com.ge.predix.insight.dto.customdataprovider.OrchestrationExecutionContext; 22 | 23 | public class SQLBuilderForGenericSchema extends SQLBuilderBase implements SQLBuilder { 24 | 25 | public SQLBuilderForGenericSchema(OrchestrationExecutionContext orchestrationExecutionContext, Object queryCriteria, StatementType statementType, List dataValues, Field field) { 26 | this.orchestrationExecutionContext = orchestrationExecutionContext; 27 | this.queryCriteria = queryCriteria; 28 | this.statementType = statementType; 29 | this.dataValues = dataValues; 30 | this.field = field; 31 | } 32 | 33 | public SQLBuilderForGenericSchema(Object queryCriteria, StatementType statementType) { 34 | this.queryCriteria = queryCriteria; 35 | this.statementType = statementType; 36 | } 37 | 38 | public String[] buildInsertOrUpdateSql() { 39 | selectColumns = buildSelectColumns(); 40 | Collection conditions = buildConditions(); 41 | String table = (String) ((Map) queryCriteria).get("table"); 42 | if (conditions.isEmpty()) { 43 | List insertStatements = new ArrayList<>(); 44 | // insert 45 | selectColumns.add(0, "asset_id"); 46 | for (Object value : dataValues) { 47 | String insertStmt = ""; 48 | List listValue = new ArrayList<>(); 49 | listValue.add("'" + getAssetId() + "'"); 50 | listValue.add("'" + this.field.getFieldId() + "'"); 51 | listValue.add("to_timestamp(" + ((List) value).get(0) + "::double precision/1000)"); 52 | listValue.add(((List) value).get(1)); 53 | insertStmt = insertStmt + " insert into " + table + "(" + StringUtils.join(selectColumns, ", ") + ")" + " values( " + StringUtils.join(listValue, ", ") + " )"; 54 | LOGGER.info("insert 
SQL: " + insertStmt); 55 | insertStatements.add(insertStmt); 56 | } 57 | return insertStatements.toArray(new String[0]); 58 | } else { 59 | // update 60 | List updateStatements = update(conditions, table); 61 | return updateStatements.toArray(new String[0]); 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/analytics/customconnector/refimpl/postgresdb/SQLBuilderForNormalizedSchema.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 
9 | */ 10 | 11 | package com.ge.predix.analytics.customconnector.refimpl.postgresdb; 12 | 13 | import java.util.ArrayList; 14 | import java.util.Collection; 15 | import java.util.List; 16 | import java.util.Map; 17 | 18 | import org.apache.commons.lang3.StringUtils; 19 | 20 | import com.ge.predix.insight.dto.customdataprovider.OrchestrationExecutionContext; 21 | 22 | public class SQLBuilderForNormalizedSchema extends SQLBuilderBase implements SQLBuilder { 23 | 24 | public SQLBuilderForNormalizedSchema(OrchestrationExecutionContext orchestrationExecutionContext, Object queryCriteria, StatementType statementType, List dataValues) { 25 | this.orchestrationExecutionContext = orchestrationExecutionContext; 26 | this.queryCriteria = queryCriteria; 27 | this.statementType = statementType; 28 | this.dataValues = dataValues; 29 | } 30 | 31 | public SQLBuilderForNormalizedSchema(Object queryCriteria, StatementType statementType) { 32 | this.queryCriteria = queryCriteria; 33 | this.statementType = statementType; 34 | } 35 | 36 | protected String getMapData(Map eachCondition, String key) { 37 | return (String) eachCondition.get(key); 38 | } 39 | 40 | @Override public String[] buildInsertOrUpdateSql() { 41 | selectColumns = buildSelectColumns(); 42 | Collection conditions = buildConditions(); 43 | String table = (String) ((Map) queryCriteria).get("table"); 44 | if (conditions.isEmpty()) { 45 | List insertStatements = new ArrayList<>(); 46 | // insert 47 | selectColumns.add(0, "asset_id"); 48 | for (Object value : dataValues) { 49 | String insertStmt = ""; 50 | List listValue = (List) value; 51 | listValue.set(0, "to_timestamp(" + listValue.get(0) + "::double precision/1000)"); 52 | listValue.add(0, "'" + getAssetId() + "'"); 53 | insertStmt = insertStmt + " insert into " + table + "(" + StringUtils.join(selectColumns, ", ") + ")" + " values( " + StringUtils.join(listValue, ", ") + " )"; 54 | LOGGER.info("insert SQL: " + insertStmt); 55 | insertStatements.add(insertStmt); 56 | } 
57 | return insertStatements.toArray(new String[0]); 58 | } else { 59 | // update 60 | List updateStatements = update(conditions, table); 61 | return updateStatements.toArray(new String[0]); 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/analytics/security/CsrfSecurityRequestMatcher.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.analytics.security; 12 | 13 | import java.util.regex.Pattern; 14 | import javax.servlet.http.HttpServletRequest; 15 | 16 | import org.springframework.security.web.util.matcher.RegexRequestMatcher; 17 | import org.springframework.security.web.util.matcher.RequestMatcher; 18 | 19 | public class CsrfSecurityRequestMatcher implements RequestMatcher { 20 | private Pattern allowedMethods = Pattern.compile("^(GET|HEAD|TRACE|OPTIONS)$"); 21 | private RegexRequestMatcher unprotectedMatcher = new RegexRequestMatcher("/api.*", null); 22 | 23 | @Override 24 | public boolean matches(HttpServletRequest request) { 25 | if (allowedMethods.matcher(request.getMethod()).matches()) { 26 | return false; 27 | } 28 | return !unprotectedMatcher.matches(request); 29 | } 30 | } -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/insight/dto/customdataprovider/AnalyticReadDataRequest.java: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.insight.dto.customdataprovider; 12 | 13 | import javax.xml.bind.annotation.XmlAccessType; 14 | import javax.xml.bind.annotation.XmlAccessorType; 15 | import javax.xml.bind.annotation.XmlRootElement; 16 | import javax.xml.bind.annotation.XmlType; 17 | 18 | 19 | /** 20 | *

Java class for anonymous complex type. 21 | * 22 | *

The following schema fragment specifies the expected content contained within this class. 23 | * 24 | *

25 |  * <complexType>
26 |  *   <complexContent>
27 |  *     <extension base="{http://predix.ge.com/insight/dto/customdataprovider}DataRequest">
28 |  *       <sequence>
29 |  *       </sequence>
30 |  *     </extension>
31 |  *   </complexContent>
32 |  * </complexType>
33 |  * 
34 | * 35 | * 36 | */ 37 | @XmlAccessorType(XmlAccessType.FIELD) 38 | @XmlType(name = "") 39 | @XmlRootElement(name = "analyticReadDataRequest") 40 | public class AnalyticReadDataRequest 41 | extends DataRequest 42 | { 43 | 44 | 45 | } 46 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/insight/dto/customdataprovider/AnalyticReadDataResponse.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.insight.dto.customdataprovider; 12 | 13 | import javax.xml.bind.annotation.XmlAccessType; 14 | import javax.xml.bind.annotation.XmlAccessorType; 15 | import javax.xml.bind.annotation.XmlRootElement; 16 | import javax.xml.bind.annotation.XmlType; 17 | 18 | 19 | /** 20 | *

Java class for anonymous complex type. 21 | * 22 | *

The following schema fragment specifies the expected content contained within this class. 23 | * 24 | *

25 |  * <complexType>
26 |  *   <complexContent>
27 |  *     <extension base="{http://predix.ge.com/insight/dto/customdataprovider}DataResponse">
28 |  *       <sequence>
29 |  *       </sequence>
30 |  *     </extension>
31 |  *   </complexContent>
32 |  * </complexType>
33 |  * 
34 | * 35 | * 36 | */ 37 | @XmlAccessorType(XmlAccessType.FIELD) 38 | @XmlType(name = "") 39 | @XmlRootElement(name = "analyticReadDataResponse") 40 | public class AnalyticReadDataResponse 41 | extends DataResponse 42 | { 43 | 44 | 45 | } 46 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/insight/dto/customdataprovider/AnalyticWriteDataRequest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.insight.dto.customdataprovider; 12 | 13 | import javax.xml.bind.annotation.XmlAccessType; 14 | import javax.xml.bind.annotation.XmlAccessorType; 15 | import javax.xml.bind.annotation.XmlRootElement; 16 | import javax.xml.bind.annotation.XmlType; 17 | 18 | 19 | /** 20 | *

Java class for anonymous complex type. 21 | * 22 | *

The following schema fragment specifies the expected content contained within this class. 23 | * 24 | *

25 |  * <complexType>
26 |  *   <complexContent>
27 |  *     <extension base="{http://predix.ge.com/insight/dto/customdataprovider}DataRequest">
28 |  *       <sequence>
29 |  *       </sequence>
30 |  *     </extension>
31 |  *   </complexContent>
32 |  * </complexType>
33 |  * 
34 | * 35 | * 36 | */ 37 | @XmlAccessorType(XmlAccessType.FIELD) 38 | @XmlType(name = "") 39 | @XmlRootElement(name = "analyticWriteDataRequest") 40 | public class AnalyticWriteDataRequest 41 | extends DataRequest 42 | { 43 | 44 | 45 | } 46 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/insight/dto/customdataprovider/AnalyticWriteDataResponse.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.insight.dto.customdataprovider; 12 | 13 | import javax.xml.bind.annotation.XmlAccessType; 14 | import javax.xml.bind.annotation.XmlAccessorType; 15 | import javax.xml.bind.annotation.XmlRootElement; 16 | import javax.xml.bind.annotation.XmlType; 17 | 18 | 19 | /** 20 | *

Java class for anonymous complex type. 21 | * 22 | *

The following schema fragment specifies the expected content contained within this class. 23 | * 24 | *

25 |  * <complexType>
26 |  *   <complexContent>
27 |  *     <extension base="{http://predix.ge.com/insight/dto/customdataprovider}DataResponse">
28 |  *       <sequence>
29 |  *       </sequence>
30 |  *     </extension>
31 |  *   </complexContent>
32 |  * </complexType>
33 |  * 
34 | * 35 | * 36 | */ 37 | @XmlAccessorType(XmlAccessType.FIELD) 38 | @XmlType(name = "") 39 | @XmlRootElement(name = "analyticWriteDataResponse") 40 | public class AnalyticWriteDataResponse 41 | extends DataResponse 42 | { 43 | 44 | 45 | } 46 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/insight/dto/customdataprovider/ObjectFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.insight.dto.customdataprovider; 12 | 13 | import javax.xml.bind.annotation.XmlRegistry; 14 | 15 | 16 | /** 17 | * This object contains factory methods for each 18 | * Java content interface and Java element interface 19 | * generated in the com.ge.predix.insight.dto.customdataprovider package. 20 | *

An ObjectFactory allows you to programatically 21 | * construct new instances of the Java representation 22 | * for XML content. The Java representation of XML 23 | * content can consist of schema derived interfaces 24 | * and classes representing the binding of schema 25 | * type definitions, element declarations and model 26 | * groups. Factory methods for each of these are 27 | * provided in this class. 28 | * 29 | */ 30 | @XmlRegistry 31 | public class ObjectFactory { 32 | 33 | 34 | /** 35 | * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: com.ge.predix.insight.dto.customdataprovider 36 | * 37 | */ 38 | public ObjectFactory() { 39 | } 40 | 41 | /** 42 | * Create an instance of {@link AnalyticReadDataRequest } 43 | * 44 | */ 45 | public AnalyticReadDataRequest createAnalyticReadDataRequest() { 46 | return new AnalyticReadDataRequest(); 47 | } 48 | 49 | /** 50 | * Create an instance of {@link DataRequest } 51 | * 52 | */ 53 | public DataRequest createDataRequest() { 54 | return new DataRequest(); 55 | } 56 | 57 | /** 58 | * Create an instance of {@link Field } 59 | * 60 | */ 61 | public Field createField() { 62 | return new Field(); 63 | } 64 | 65 | /** 66 | * Create an instance of {@link OrchestrationExecutionContext } 67 | * 68 | */ 69 | public OrchestrationExecutionContext createOrchestrationExecutionContext() { 70 | return new OrchestrationExecutionContext(); 71 | } 72 | 73 | /** 74 | * Create an instance of {@link AnalyticReadDataResponse } 75 | * 76 | */ 77 | public AnalyticReadDataResponse createAnalyticReadDataResponse() { 78 | return new AnalyticReadDataResponse(); 79 | } 80 | 81 | /** 82 | * Create an instance of {@link DataResponse } 83 | * 84 | */ 85 | public DataResponse createDataResponse() { 86 | return new DataResponse(); 87 | } 88 | 89 | /** 90 | * Create an instance of {@link AnalyticWriteDataRequest } 91 | * 92 | */ 93 | public AnalyticWriteDataRequest createAnalyticWriteDataRequest() 
{ 94 | return new AnalyticWriteDataRequest(); 95 | } 96 | 97 | /** 98 | * Create an instance of {@link AnalyticWriteDataResponse } 99 | * 100 | */ 101 | public AnalyticWriteDataResponse createAnalyticWriteDataResponse() { 102 | return new AnalyticWriteDataResponse(); 103 | } 104 | 105 | } 106 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/insight/dto/customdataprovider/package-info.java: -------------------------------------------------------------------------------- 1 | @javax.xml.bind.annotation.XmlSchema(namespace = "http://predix.ge.com/insight/dto/customdataprovider") 2 | package com.ge.predix.insight.dto.customdataprovider; 3 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/insight/dto/errorresponse/ObjectFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | * 4 | * The copyright to the computer software herein is the property of 5 | * General Electric Company. The software may be used and/or copied only 6 | * with the written permission of General Electric Company or in accordance 7 | * with the terms and conditions stipulated in the agreement/contract 8 | * under which the software has been supplied. 9 | */ 10 | 11 | package com.ge.predix.insight.dto.errorresponse; 12 | 13 | import javax.xml.bind.annotation.XmlRegistry; 14 | 15 | 16 | /** 17 | * This object contains factory methods for each 18 | * Java content interface and Java element interface 19 | * generated in the com.ge.predix.insight.dto.errorresponse package. 20 | *

An ObjectFactory allows you to programatically 21 | * construct new instances of the Java representation 22 | * for XML content. The Java representation of XML 23 | * content can consist of schema derived interfaces 24 | * and classes representing the binding of schema 25 | * type definitions, element declarations and model 26 | * groups. Factory methods for each of these are 27 | * provided in this class. 28 | * 29 | */ 30 | @XmlRegistry 31 | public class ObjectFactory { 32 | 33 | 34 | /** 35 | * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: com.ge.predix.insight.dto.errorresponse 36 | * 37 | */ 38 | public ObjectFactory() { 39 | } 40 | 41 | /** 42 | * Create an instance of {@link ErrorResponse } 43 | * 44 | */ 45 | public ErrorResponse createErrorResponse() { 46 | return new ErrorResponse(); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/java/com/ge/predix/insight/dto/errorresponse/package-info.java: -------------------------------------------------------------------------------- 1 | @javax.xml.bind.annotation.XmlSchema(namespace = "http://predix.ge.com/insight/dto/errorresponse") 2 | package com.ge.predix.insight.dto.errorresponse; 3 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/resources/META-INF/authentication-context.xml: -------------------------------------------------------------------------------- 1 | 10 | 11 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | classpath:custom-connector-application.properties 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- 
/custom-data-connector/postgresdb-ref-impl-data-connector/src/main/resources/custom-connector-application.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | # 4 | # The copyright to the computer software herein is the property of 5 | # General Electric Company. The software may be used and/or copied only 6 | # with the written permission of General Electric Company or in accordance 7 | # with the terms and conditions stipulated in the agreement/contract 8 | # under which the software has been supplied. 9 | # 10 | 11 | 12 | server.port=18888 13 | 14 | cxf.path=/ 15 | 16 | # PostgresSQL Driver - for local PostgresSQL DB instance 17 | turbine.connector.db.driver=org.postgresql.Driver 18 | turbine.connector.db.url=jdbc:postgresql://localhost:5432/analytics?searchpath=turbine 19 | turbine.connector.db.username=turbine 20 | turbine.connector.db.password=turbine123 21 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/resources/db/migration/schema/V0_1_0__turbine_db_schema.sql: -------------------------------------------------------------------------------- 1 | -- Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 2 | 3 | -- The copyright to the computer software herein is the property of 4 | -- General Electric Company. The software may be used and/or copied only 5 | -- with the written permission of General Electric Company or in accordance 6 | -- with the terms and conditions stipulated in the agreement/contract 7 | -- under which the software has been supplied. 
8 | 9 | 10 | -- **************************************************** 11 | -- DDL FOR TABLE ANALYTIC CATALOG 12 | -- **************************************************** 13 | 14 | 15 | 16 | CREATE TABLE TURBINE_COMPRESSOR ( 17 | ID SERIAL NOT NULL, 18 | ASSET_ID varchar(36) NOT NULL, 19 | EXHAUST_GAS_TEMP real NULL, 20 | VIBRATION real NULL, 21 | RECORDED_AT timestamp NULL 22 | ); 23 | 24 | 25 | -------------------------------------------------------- 26 | -- Constraints for Table ANALYTIC CATALOG 27 | -------------------------------------------------------- 28 | ALTER TABLE TURBINE_COMPRESSOR ADD CONSTRAINT PK_TURBINE_COMPRESSOR PRIMARY KEY ( ID ); 29 | 30 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/resources/db/migration/schema/V0_3_0__generic_sensor_schema.sql: -------------------------------------------------------------------------------- 1 | -- Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 2 | 3 | -- The copyright to the computer software herein is the property of 4 | -- General Electric Company. The software may be used and/or copied only 5 | -- with the written permission of General Electric Company or in accordance 6 | -- with the terms and conditions stipulated in the agreement/contract 7 | -- under which the software has been supplied. 
8 | 9 | -- **************************************************** 10 | -- DDL FOR TABLE ANALYTIC CATALOG 11 | -- **************************************************** 12 | 13 | CREATE TABLE Sensor_Data ( 14 | ID SERIAL NOT NULL, 15 | asset_id varchar(250) NOT NULL, 16 | field_id varchar(250) NULL, 17 | data_value real NULL, 18 | recorded_at timestamp default CURRENT_TIMESTAMP, 19 | updated_at timestamp default CURRENT_TIMESTAMP 20 | ); 21 | 22 | -------------------------------------------------------- 23 | -- Constraints for Table ANALYTIC CATALOG 24 | -------------------------------------------------------- 25 | ALTER TABLE Sensor_Data ADD CONSTRAINT PK_Sensor_Data PRIMARY KEY ( ID ); -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/resources/db/migration/schema/V0_4_0__generic_sensor_sample_data.sql: -------------------------------------------------------------------------------- 1 | -- Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 2 | 3 | -- The copyright to the computer software herein is the property of 4 | -- General Electric Company. The software may be used and/or copied only 5 | -- with the written permission of General Electric Company or in accordance 6 | -- with the terms and conditions stipulated in the agreement/contract 7 | -- under which the software has been supplied. 
8 | 9 | INSERT INTO SENSOR_DATA ( asset_id, field_id, data_value) VALUES ('ASSET1', 'vibration', 200.42); 10 | INSERT INTO SENSOR_DATA ( asset_id, field_id, data_value) VALUES ('ASSET1', 'vibration', 201.42); 11 | INSERT INTO SENSOR_DATA ( asset_id, field_id, data_value) VALUES ( 'ASSET1', 'vibration', 202.42); 12 | INSERT INTO SENSOR_DATA ( asset_id, field_id, data_value) VALUES ('ASSET1', 'vibration', 203.42); 13 | INSERT INTO SENSOR_DATA ( asset_id, field_id, data_value) VALUES ('ASSET1', 'KW', 300.42); 14 | INSERT INTO SENSOR_DATA ( asset_id, field_id, data_value) VALUES ( 'ASSET1', 'KW', 301.42); 15 | INSERT INTO SENSOR_DATA ( asset_id, field_id, data_value) VALUES ( 'ASSET1', 'KW', 302.42); 16 | INSERT INTO SENSOR_DATA ( asset_id, field_id, data_value) VALUES ('ASSET1', 'KW', 303.42); 17 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 11 | 12 | 13 | 14 | 15 | UTF-8 16 | %d %-4relative [%thread] %-5level %logger{35} - %msg%n 17 | 18 | 19 | 20 | 21 | custom-data-connector.log 22 | 23 | 24 | custom-data-connector-%i.log 25 | 1 26 | 3 27 | 28 | 29 | 30 | 1MB 31 | 32 | 33 | 34 | UTF-8 35 | %d %-4relative [%thread] %-5level %logger{35} - %msg%n 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/test/resources/analytic-read-data-request.json: -------------------------------------------------------------------------------- 1 | { 2 | "field": [ 3 | { 4 | "fieldId": "vibration", 5 | "fullyQualifiedPortName": "a.b.c.somefield", 6 | "queryCriteria": { 7 | "columns": [ 8 | "recorded_at", 9 | "vibration" 10 | ], 11 | "table": "turbine_compressor", 12 | "conditions": [ 13 | { 14 | "key": "asset_id", 15 | "value": "ASSET1", 16 | 
"valueType": "string", 17 | "relation": " = " 18 | }, 19 | { 20 | "key": "recorded_at", 21 | "value": "current_timestamp", 22 | "valueType": "none", 23 | "relation": " < " 24 | } 25 | ] 26 | } 27 | }, 28 | { 29 | "fieldId": "exhaust_gas_temp", 30 | "fullyQualifiedPortName": "a.b.c.somefield", 31 | "queryCriteria": { 32 | "columns": [ 33 | "recorded_at", 34 | "exhaust_gas_temp" 35 | ], 36 | "table": "turbine_compressor", 37 | "conditions": [ 38 | { 39 | "key": "asset_id", 40 | "value": "ASSET1", 41 | "valueType": "string", 42 | "relation": " = " 43 | }, 44 | { 45 | "key": "recorded_at", 46 | "value": "current_timestamp", 47 | "valueType": "none", 48 | "relation": " < " 49 | } 50 | ] 51 | } 52 | } 53 | ], 54 | "customAttributes": { 55 | "key2": "value2", 56 | "key1": "value1" 57 | }, 58 | "systemAttributes": { 59 | "syskey2": "sysvalue2", 60 | "syskey1": "sysvalue1" 61 | }, 62 | "orchestrationExecutionContext": { 63 | "assetId": "/assets/1", 64 | "orchestrationConfigurationID": "febf24f8-182f-4569-b195-75e4d59127bf", 65 | "orchestrationExecutionRequestID": "cddb7e23-0bc3-445c-b7f9-e14bf65d499c", 66 | "analyticId": "6c1ee0be-1691-46f8-b0ce-b9120fb1e316", 67 | "analyticName": "Trend Anomaly Detector", 68 | "analyticVersion": "1.0", 69 | "analyticExecutionRequestID": "ae6ceb06-b922-4a8e-97a9-4c7d2f7a313d" 70 | }, 71 | "dataSourceId": "postgresdb" 72 | } -------------------------------------------------------------------------------- /custom-data-connector/postgresdb-ref-impl-data-connector/src/test/resources/custom-connector-application.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2015 - 2016 General Electric Company. All rights reserved. 3 | # 4 | # The copyright to the computer software herein is the property of 5 | # General Electric Company. 
The software may be used and/or copied only 6 | # with the written permission of General Electric Company or in accordance 7 | # with the terms and conditions stipulated in the agreement/contract 8 | # under which the software has been supplied. 9 | # 10 | 11 | server.port=${ref_postgres_data_connector_port} 12 | 13 | cxf.path=/ 14 | 15 | # H2 Driver - For component tests 16 | turbine.connector.db.driver=org.h2.Driver 17 | turbine.connector.db.url=jdbc:h2:mem:config;MODE=PostgreSQL;DB_CLOSE_ON_EXIT=FALSE;INIT=CREATE SCHEMA IF NOT EXISTS "public" 18 | turbine.connector.db.username=sa 19 | turbine.connector.db.password= 20 | -------------------------------------------------------------------------------- /orchestrations/OrchestrationWithOneAnalytic.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 9 | 10 | 11 | 13 | sid-flow1 14 | 15 | 16 | 23 | sid-flow1 24 | sid-flow2 25 | 26 | 27 | 28 | sid-flow2 29 | 30 | 31 | 32 | 35 | 36 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /orchestrations/OrchestrationWithThirdPartyAnalytic.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 9 | 10 | 11 | 13 | sid-flow1 14 | 15 | 16 | 22 | sid-flow1 23 | sid-flow2 24 | 25 | 26 | 27 | sid-flow2 28 | 29 | 30 | 31 | 34 | 35 | 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /orchestrations/OrchestrationWithTwoAnalytics.xml: -------------------------------------------------------------------------------- 1 | 2 | 10 | 11 | 12 | 13 | 15 | sid-flow1 16 | 17 | 18 | 25 | sid-flow1 26 | sid-flow2 27 | 28 | 29 | 36 | sid-flow2 37 | sid-flow3 38 | 39 | 40 | 41 | sid-flow3 42 | 43 | 44 | 45 | 48 | 49 | 51 | 52 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /orchestrations/README.md: 
-------------------------------------------------------------------------------- 1 | ## Sample Orchestration Workflow Files 2 | 3 | A collection of simple orchestration workflow files for use with Predix Analytics. 4 | 5 | 1. **[Orchestration with one Catalog analytic](OrchestrationWithOneAnalytic.xml)** 6 | 2. **[Orchestration with one third-party analytic](OrchestrationWithThirdPartyAnalytic.xml)** 7 | 3. **[Orchestration with one Catalog analytic and one third-party analytic](OrchestrationWithTwoAnalytics.xml)** 8 | 9 | A Catalog analytic is one that has been uploaded to the Analytic Catalog and deployed into Cloud Foundry. 10 | 11 | ## Complete Sample Orchestrations 12 | 13 | Sample orchestrations that include all supporting files, such as orchestration workflow (BPMN), port-to-field maps, input/output data, and Postman collections 14 | 15 | 1. **[One step orchestration](oneStepOrchestration):** An orchestration using a single timeseries demo adder to add 2 timeseries arrays 16 | 2. **[Multi step orchestration with Demo Adder](demoAdderMultiStepOrchestration):** An orchestration that adds 3 timeseries arrays by running the timeseries demo adder twice 17 | 3. **[Multi step orchestration with Miner's Rule Analytic](multiStepOrchestration):** An orchestration that runs a Miner's rule operation on pre-processed output from a timeseries demo adder analytic 18 | 19 | 20 | You can find more information on [configuring](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/orchestration-configuration) and [running](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/orchestration-execution) orchestrations on Predix IO. 
-------------------------------------------------------------------------------- /orchestrations/demoAdderMultiStepOrchestration/InitialData/rawTimeSeriesData_KW.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "SampleData1", 3 | "body": [ 4 | { 5 | "name": "KW", 6 | "datapoints": [ 7 | [ 8 | "1455733669601", 9 | 1.0 10 | ], 11 | [ 12 | "1455733669602", 13 | 2.0 14 | ], 15 | [ 16 | "1455733669603", 17 | 3.0 18 | ], 19 | [ 20 | "1455733669604", 21 | 4.0 22 | ], 23 | [ 24 | "1455733669605", 25 | 5.0 26 | ], 27 | [ 28 | "1455733669606", 29 | 6.0 30 | ], 31 | [ 32 | "1455733669607", 33 | 7.0 34 | ], 35 | [ 36 | "1455733669608", 37 | 8.0 38 | ], 39 | [ 40 | "1455733669609", 41 | 9.0 42 | ], 43 | [ 44 | "1455733669610", 45 | 10.0 46 | ] 47 | ], 48 | "attributes": { 49 | "source1": "analytics", 50 | "source2": "test" 51 | } 52 | } 53 | ] 54 | } -------------------------------------------------------------------------------- /orchestrations/demoAdderMultiStepOrchestration/InitialData/rawTimeSeriesData_vibration.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "SampleData2", 3 | "body": [ 4 | { 5 | "name": "vibration", 6 | "datapoints": [ 7 | [ 8 | "1455733669601", 9 | 100.0 10 | ], 11 | [ 12 | "1455733669602", 13 | 200.0 14 | ], 15 | [ 16 | "1455733669603", 17 | 300.0 18 | ], 19 | [ 20 | "1455733669604", 21 | 400.0 22 | ], 23 | [ 24 | "1455733669605", 25 | 500.0 26 | ], 27 | [ 28 | "1455733669606", 29 | 600.0 30 | ], 31 | [ 32 | "1455733669607", 33 | 700.0 34 | ], 35 | [ 36 | "1455733669608", 37 | 800.0 38 | ], 39 | [ 40 | "1455733669609", 41 | 900.0 42 | ], 43 | [ 44 | "1455733669610", 45 | 1000.0 46 | ] 47 | ], 48 | "attributes": { 49 | "source1": "analytics", 50 | "source2": "test" 51 | } 52 | } 53 | ] 54 | } 55 | 56 | -------------------------------------------------------------------------------- 
/orchestrations/demoAdderMultiStepOrchestration/Orchestration/TwoStepOrchestration.bpmn20.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 9 | 10 | 11 | 13 | sid-flow1 14 | 15 | 16 | 20 | sid-flow1 21 | sid-flow2 22 | 23 | 24 | 28 | sid-flow2 29 | sid-flow3 30 | 31 | 32 | 33 | 34 | sid-flow3 35 | 36 | 37 | 38 | 41 | 42 | 44 | 45 | 47 | 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /orchestrations/demoAdderMultiStepOrchestration/Orchestration/step1-portToFieldMap.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "java-timeseries-demo-adder", 3 | "analyticVersion": "1.0", 4 | "orchestrationStepId": "sid-10001", 5 | "iterations": [ 6 | { 7 | "inputMaps": [ 8 | { 9 | "valueSourceType": "DATA_CONNECTOR", 10 | "fullyQualifiedPortName": "data.time_series.numberArray1", 11 | "fieldId": "KW", 12 | "queryCriteria": { 13 | "start": 1455733669601, 14 | "end": 1455733669610 15 | }, 16 | "engUnit": "kw", 17 | "required": true, 18 | "dataSourceId": "Predix Time Series" 19 | }, 20 | { 21 | "valueSourceType": "DATA_CONNECTOR", 22 | "fullyQualifiedPortName": "data.time_series.numberArray2", 23 | "fieldId": "vibration", 24 | "queryCriteria": { 25 | "start": 1455733669601, 26 | "end": 1455733669610 27 | }, 28 | "engUnit": "hertz", 29 | "required": true, 30 | "dataSourceId": "Predix Time Series" 31 | } 32 | ], 33 | "outputMaps": [ 34 | { 35 | "fullyQualifiedPortName": "data.time_series.sum", 36 | "fieldId": "bearing_temperature", 37 | "engUnit": "Celsius", 38 | "dataSourceId": "Temporary, Predix Time Series" 39 | } 40 | ] 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /orchestrations/demoAdderMultiStepOrchestration/Orchestration/step2-portToFieldMap.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": 
"java-timeseries-demo-adder", 3 | "analyticVersion": "1.0", 4 | "orchestrationStepId": "sid-10002", 5 | "iterations": [ 6 | { 7 | "inputMaps": [ 8 | { 9 | "valueSourceType": "DATA_CONNECTOR", 10 | "fullyQualifiedPortName": "data.time_series.numberArray1", 11 | "fieldId": "bearing_temperature", 12 | "queryCriteria": { 13 | "start": 1455733669601, 14 | "end": 1455733669610 15 | }, 16 | "engUnit": "Celsius", 17 | "required": true, 18 | "dataSourceId": "Predix Time Series" 19 | }, 20 | { 21 | "valueSourceType": "DATA_CONNECTOR", 22 | "fullyQualifiedPortName": "data.time_series.numberArray2", 23 | "fieldId": "vibration", 24 | "queryCriteria": { 25 | "start": 1455733669601, 26 | "end": 1455733669610 27 | }, 28 | "engUnit": "hertz", 29 | "required": true, 30 | "dataSourceId": "Predix Time Series" 31 | } 32 | ], 33 | "inputModelMaps": [ 34 | { 35 | "modelPortName": "threshold", 36 | "modelName": "model-name", 37 | "modelVersion": "v1" 38 | } 39 | ], 40 | "outputMaps": [ 41 | { 42 | "fullyQualifiedPortName": "data.time_series.sum", 43 | "fieldId": "bearing_temperature_final", 44 | "engUnit": "Celsius", 45 | "dataSourceId": "Predix Time Series" 46 | } 47 | ] 48 | } 49 | ] 50 | } 51 | -------------------------------------------------------------------------------- /orchestrations/demoAdderMultiStepOrchestration/Orchestration/thresholdModel.json: -------------------------------------------------------------------------------- 1 | 500 -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/README.md: -------------------------------------------------------------------------------- 1 | # Initial Data and Queries for the 2 Step Orchestration 2 | 3 | Note: The data in these files has been created for demonstration purposes. It is not from real assets. 
4 | # File list 5 | | File | Description | 6 | | ---- | ----------- | 7 | |asset32-bearing-temperature-timeseries-data.json|The initial timeseries values for asset 32's bearing temperature sensor| 8 | |asset32-kw-timeseries-data.json|The initial timeseries values for asset 32's kw sensor| 9 | |asset32-vibration-timeseries-data.json|The initial timeseries values for asset 32's vibration sensor| 10 | |asset32-vibration1-timeseries-data.json|The initial timeseries values for asset 32's backup vibration sensor| 11 | |asset37-bearing-temperature-timeseries-data.json|The initial timeseries values for asset 37's bearing temperature sensor| 12 | |asset37-kw-timeseries-data.json|The initial timeseries values for asset 37's kw sensor| 13 | |asset37-vibration-timeseries-data.json|The initial timeseries values for asset 37's vibration sensor| 14 | |asset37-vibration1-timeseries-data.json|The initial timeseries values for asset 37's backup vibration sensor| 15 | |asset38-bearing-temperature-timeseries-data.json|The initial timeseries values for asset 38's bearing temperature sensor| 16 | |asset38-kw-timeseries-data.json|The initial timeseries values for asset 38's kw sensor| 17 | |asset38-vibration-timeseries-data.json|The initial timeseries values for asset 38's vibration sensor| 18 | |asset38-vibration1-timeseries-data.json|The initial timeseries values for asset 38's backup vibration sensor| 19 | |asset32-CDM-timeseries-data.json|The initial timeseries values for asset 32's CDM| 20 | |asset37-CDM-timeseries-data.json|The initial timeseries values for asset 37's CDM| 21 | |asset38-CDM-timeseries-data.json|The initial timeseries values for asset 38's CDM| 22 | |asset-model.json|The data model loaded into Predix Asset to support the demonstration orchestration| 23 | |assetGroupQuery.txt|The asset group query that will be used on the orchestration run request| 24 | |tagMapQuery.txt|The tag map query that will be loaded as the default tag map query| 25 | 
-------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset32-CDM-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "32_CDM_44-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset32-bearing-temperature-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "32_bearing_temperature_36-UUID", 6 | "datapoints": [ 7 | [1453338376200,201.0,3], 8 | [1453338376201,202.0,3], 9 | [1453338376202,201.3,3], 10 | [1453338376203,201.2,3], 11 | [1453338376204,200.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset32-kw-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "32_KW_33-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.8,3], 10 | [1453338376203,3.0,3], 11 | [1453338376204,1.6,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset32-vibration-timeseries-data.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "32_vibration_34-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset32-vibration1-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "32_vibration1_42-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset37-CDM-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "37_CDM_45-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset37-bearing-temperature-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "37_bearing_temperature_41-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | 
[1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset37-kw-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "37_KW_57-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset37-vibration-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "37_vibration_39-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset37-vibration1-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "37_vibration1_43-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } 
-------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset38-CDM-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "38_CDM_54-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset38-bearing-temperature-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "38_bearing_temperature_53-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset38-kw-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "38_KW_50-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset38-vibration-timeseries-data.json: -------------------------------------------------------------------------------- 1 | 
{ 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "38_vibration_51-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/asset38-vibration1-timeseries-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "38_vibration1_55-UUID", 6 | "datapoints": [ 7 | [1453338376200,1.0,3], 8 | [1453338376201,2.0,3], 9 | [1453338376202,1.3,3], 10 | [1453338376203,1.2,3], 11 | [1453338376204,0.0,3] 12 | ], 13 | "attributes":{ 14 | "source1":"demo" 15 | } 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/assetGroupQuery.txt: -------------------------------------------------------------------------------- 1 | classification=/classifications/turbine:uri=/assets/minersRuleOrch*&fields=uri 2 | -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/InitialData/tagMapQuery.txt: -------------------------------------------------------------------------------- 1 | /assets/?filter=classification=/classifications/timeseries_tag&asset_id=${assetId}?fields=name,timeseries_tag_id 2 | 3 | PAYLOAD BODY FOR DEFAULT TAG MAP QUERY 4 | { 5 | "defaultTagNameQuery":"assets?filter=classification=/classifications/timeseries_tag:asset_id=${ASSET_ID}", 6 | "fieldNameSpecifier" : "name", 7 | "tagNameSpecifier" : "timeseries_tag_id", 8 | "author": "Jeanette", 9 | "description": "default tag query for miners rule multi-step orchestration example" 10 | } 
-------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/Orchestration/Asset32CDMModel.txt: -------------------------------------------------------------------------------- 1 | 10.0, 20.0,10.0 -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/Orchestration/DemoAdderPortToFieldMap.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "Timeseries Demo Adder", 3 | "analyticVersion": "v1", 4 | "iterations": [ 5 | { 6 | "inputMaps": [ 7 | { 8 | "valueSourceType": "DATA_CONNECTOR", 9 | "fullyQualifiedPortName": "data.time_series.numberArray1", 10 | "fieldId": "vibration", 11 | "queryCriteria": { 12 | "start": 0, 13 | "end": -1, 14 | "tags": [{ 15 | "limit": 10, 16 | "order": "asc" 17 | } 18 | ] 19 | }, 20 | "engUnit": null, 21 | "required": true, 22 | "dataSourceId": "PredixTimeSeries" 23 | }, 24 | { 25 | "valueSourceType": "DATA_CONNECTOR", 26 | "fullyQualifiedPortName": "data.time_series.numberArray2", 27 | "fieldId": "vibration1", 28 | "queryCriteria": { 29 | "start": 0, 30 | "end": -1, 31 | "tags": [{ 32 | "limit": 10, 33 | "order": "asc" 34 | } 35 | ] 36 | }, 37 | "engUnit": null, 38 | "required": true, 39 | "dataSourceId": "PredixTimeSeries" 40 | } 41 | ], 42 | "outputMaps": [ 43 | { 44 | "fullyQualifiedPortName": "data.time_series.sum", 45 | "fieldId": "vibeSum", 46 | "dataSourceId": "Temporary" 47 | } 48 | ] 49 | } 50 | ] 51 | } 52 | -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/Orchestration/MinersRuleOrchestration.bpmn20.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 9 | 10 | 11 | 13 | start-to-demoAdder 14 | 15 | 16 | 20 | start-to-demoAdder 21 | demoAdder-to-minersRule 22 | 23 | 27 | demoAdder-to-minersRule 28 | minersRule-to-end 29 | 30 | 31 | 32 
| minersRule-to-end 33 | 34 | 35 | 36 | 39 | 40 | 43 | 44 | 46 | 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/Orchestration/MinersRulePortToFieldMap.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "Miners Rule", 3 | "analyticVersion": "v2", 4 | "iterations": [ 5 | { 6 | "inputMaps": [ 7 | { 8 | "valueSourceType": "DATA_CONNECTOR", 9 | "fullyQualifiedPortName": "current_cdm.cdm_values", 10 | "fieldId": "currentCDM", 11 | "queryCriteria": { 12 | "start": 0, 13 | "end": -1, 14 | "tags": [{ 15 | "limit": 10, 16 | "order": "asc" 17 | } 18 | ] 19 | }, 20 | "engUnit": null, 21 | "required": true, 22 | "dataSourceId": "PredixTimeSeries" 23 | }, 24 | { 25 | "valueSourceType": "DATA_CONNECTOR", 26 | "fullyQualifiedPortName": "recentStresses.stresses.0", 27 | "fieldId": "KW", 28 | "queryCriteria": { 29 | "start": 0, 30 | "end": -1, 31 | "tags": [{ 32 | "limit": 10, 33 | "order": "asc" 34 | } 35 | ] 36 | }, 37 | "engUnit": null, 38 | "required": true, 39 | "dataSourceId": "PredixTimeSeries" 40 | }, 41 | { 42 | "valueSourceType": "DATA_CONNECTOR", 43 | "fullyQualifiedPortName": "recentStresses.stresses.1", 44 | "fieldId": "bearing temperature", 45 | "queryCriteria": { 46 | "start": 0, 47 | "end": -1, 48 | "tags": [{ 49 | "limit": 10, 50 | "order": "asc" 51 | } 52 | ] 53 | }, 54 | "engUnit": null, 55 | "required": true, 56 | "dataSourceId": "PredixTimeSeries" 57 | }, 58 | { 59 | "valueSourceType": "DATA_CONNECTOR", 60 | "fullyQualifiedPortName": "recentStresses.stresses.2", 61 | "fieldId": "vibeSum", 62 | "dataSourceId": "Temporary" 63 | } 64 | ], 65 | "inputModelMaps":[ 66 | {"modelPortName": "Stress Limits", 67 | "modelName" : "CDMStressLimits", 68 | "modelVersion":"v1.0" 69 | } 70 | ], 71 | "outputMaps": [ 72 | { 73 | "fullyQualifiedPortName": "updatedCDM.cdm_values", 74 | "fieldId": "currentCDM", 75 | "dataSourceId": 
"PredixTimeSeries" 76 | } 77 | ] 78 | } 79 | ] 80 | } 81 | -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/Orchestration/README.md: -------------------------------------------------------------------------------- 1 | ## Step Miners Rule orchestration 2 | 3 | - The first step adds 2 vibration values together 4 | - The second step runs Miners Rule against 2 other sensors and the sum from the first step 5 | - The CDM for the analytic has one set of values for all assets in the Turbine asset group and a special set of values for asset with id Asset32 6 | 7 | ## File list 8 | | File | Description | 9 | | ---- | ----------- | 10 | |DemoAdderPortToFieldMap.json|The port to field map for adding the vibration sensors.| 11 | |MinersRulePortToFieldMap.json|The port to field map for running Miners Rule on the stress1 and stress2 sensors and vibeSum the life limits coming in as a model. | 12 | |MinersRuleOrchestration.bpmn20.xml|The bpmn xml file for this orchestration| 13 | |TurbineCDMModel.txt|the model file for Turbines| 14 | |Asset32CDMModel.txt|the model file for asset32 (asset id: /assets/minersRuleOrch32-UUID) 15 | -------------------------------------------------------------------------------- /orchestrations/multiStepOrchestration/Orchestration/TurbineCDMModel.txt: -------------------------------------------------------------------------------- 1 | 100.0, 200.0,100.0 -------------------------------------------------------------------------------- /orchestrations/oneStepOrchestration/orchestrationConfigurationFiles/orchestration-workflow.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 9 | 10 | 11 | 13 | start-to-demoTimeseriesAdder 14 | 15 | 16 | 23 | start-to-demoTimeseriesAdder 24 | demoTimeseriesAdder-to-end 25 | 26 | 27 | 28 | demoTimeseriesAdder-to-end 29 | 30 | 31 | 32 | 35 | 36 | 38 | 39 | 40 | 41 | 
-------------------------------------------------------------------------------- /orchestrations/oneStepOrchestration/orchestrationConfigurationFiles/port-to-field-map-for-demoTimeseriesAdder.json: -------------------------------------------------------------------------------- 1 | { 2 | "analyticName": "demo-timeseries-adder", 3 | "analyticVersion": "V1", 4 | "orchestrationStepId": "demoTimeseriesAdder", 5 | "iterations": [ 6 | { 7 | "inputMaps": [ 8 | { 9 | "valueSourceType": "DATA_CONNECTOR", 10 | "fullyQualifiedPortName": "data.time_series.numberArray1", 11 | "fieldId": "temperature sensor", 12 | "queryCriteria": {"start": 0, "end": -1}, 13 | "dataSourceId": "PredixTimeSeries" 14 | }, 15 | { 16 | "valueSourceType": "DATA_CONNECTOR", 17 | "fullyQualifiedPortName": "data.time_series.numberArray2", 18 | "fieldId": "vibration sensor", 19 | "queryCriteria": {"start": 0, "end": -1}, 20 | "dataSourceId": "PredixTimeSeries" 21 | } 22 | ], 23 | "outputMaps": [ 24 | { 25 | "fullyQualifiedPortName": "data.time_series.sum", 26 | "fieldId": "demo sum", 27 | "dataSourceId": "PredixTimeSeries" 28 | } 29 | ] 30 | } 31 | ] 32 | } -------------------------------------------------------------------------------- /orchestrations/oneStepOrchestration/supportingDataFiles/analytic-input-for-demo-timeseries-adder.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "time_series": { 4 | "numberArray1": [ 5 | 1.0, 6 | 2.0, 7 | 3.0, 8 | 4.0, 9 | 5.0, 10 | 6.0, 11 | 7.0, 12 | 8.0, 13 | 9.0, 14 | 10.0 15 | ], 16 | "numberArray2": [ 17 | 100.0, 18 | 200.0, 19 | 300.0, 20 | 400.0, 21 | 500.0, 22 | 600.0, 23 | 700.0, 24 | 800.0, 25 | 900.0, 26 | 1000.0 27 | ], 28 | "time_stamp": [ 29 | "1455733669601", 30 | "1455733669602", 31 | "1455733669603", 32 | "1455733669604", 33 | "1455733669605", 34 | "1455733669606", 35 | "1455733669607", 36 | "1455733669608", 37 | "1455733669609", 38 | "1455733669610" 39 | ] 40 | } 41 | } 42 | } 
-------------------------------------------------------------------------------- /orchestrations/oneStepOrchestration/supportingDataFiles/tag-A-time-bounded-request.json: -------------------------------------------------------------------------------- 1 | {"cache_time":0,"tags":[{"name":"tag-A"}],"start":0,"end":-1} -------------------------------------------------------------------------------- /orchestrations/oneStepOrchestration/supportingDataFiles/tag-B-time-bounded-request.json: -------------------------------------------------------------------------------- 1 | {"cache_time":0,"tags":[{"name":"tag-B"}],"start":0,"end":-1} -------------------------------------------------------------------------------- /orchestrations/oneStepOrchestration/supportingDataFiles/time-series-tag-A-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "tag-A", 6 | "datapoints": [ 7 | [ 8 | 1453338376200, 9 | 1, 10 | 3 11 | ], 12 | [ 13 | 1453338376201, 14 | 2, 15 | 3 16 | ], 17 | [ 18 | 1453338376202, 19 | 3, 20 | 3 21 | ], 22 | [ 23 | 1453338376203, 24 | 4, 25 | 3 26 | ], 27 | [ 28 | 1453338376204, 29 | 5, 30 | 3 31 | ], 32 | [ 33 | 1453338376205, 34 | 6, 35 | 3 36 | ], 37 | [ 38 | 1453338376206, 39 | 7, 40 | 3 41 | ], 42 | [ 43 | 1453338376207, 44 | 8, 45 | 3 46 | ], 47 | [ 48 | 1453338376208, 49 | 9, 50 | 3 51 | ], 52 | [ 53 | 1453338376209, 54 | 10, 55 | 3 56 | ] 57 | ], 58 | "attributes": { 59 | "host": "server1", 60 | "customer": "Acme" 61 | } 62 | } 63 | ] 64 | } -------------------------------------------------------------------------------- /orchestrations/oneStepOrchestration/supportingDataFiles/time-series-tag-B-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "messageId": "1453338376222", 3 | "body": [ 4 | { 5 | "name": "tag-B", 6 | "datapoints": [ 7 | [ 8 | 1453338376200, 9 | 10, 10 | 3 11 | ], 12 | [ 13 | 
1453338376201, 14 | 20, 15 | 3 16 | ], 17 | [ 18 | 1453338376202, 19 | 30, 20 | 3 21 | ], 22 | [ 23 | 1453338376203, 24 | 40, 25 | 3 26 | ], 27 | [ 28 | 1453338376204, 29 | 50, 30 | 3 31 | ], 32 | [ 33 | 1453338376205, 34 | 60, 35 | 3 36 | ], 37 | [ 38 | 1453338376206, 39 | 70, 40 | 3 41 | ], 42 | [ 43 | 1453338376207, 44 | 80, 45 | 3 46 | ], 47 | [ 48 | 1453338376208, 49 | 90, 50 | 3 51 | ], 52 | [ 53 | 1453338376209, 54 | 100, 55 | 3 56 | ] 57 | ], 58 | "attributes": { 59 | "host": "server1", 60 | "customer": "Acme" 61 | } 62 | } 63 | ] 64 | } -------------------------------------------------------------------------------- /postman/README.md: -------------------------------------------------------------------------------- 1 | ### ![Predix Analytics](./images/PredixAnalytics.png)![Predix Analytics](./images/separator.png)[![Postman](./images/postman-logo.png)](https://www.getpostman.com) 2 | 3 | 4 | ### API Reference 5 | These request collections conform to the Analytics Framework API set, which is documented at [predix.io/api](https://www.predix.io/api). User guides for the API, which cover common use cases and workflows, can be found on the full [Analytics Framework](https://docs.predix.io/en-US/content/service/analytics_services/analytics_framework/) documentation site. 6 | 7 | 8 | ### Request Collections 9 | These files contain sample requests that can be imported directly into Postman through the "Import" link near the top-center of the window. You can then customize and test out your REST requests to aid in implementing your applications. 
10 | 11 | * [Analytics Framework](./Analytics_Framework.postman_collection.json) 12 | * [UAA Token for Analytics Services](./UAA_Token_for_Analytics_Services.json.postman_collection): Use these templates to retrieve the tokens needed to call Analytics APIs 13 | 14 | If you have trouble importing Postman files using this method, you can try directly pasting the contents of the Postman files using the "Raw Text" tab within the Import window. 15 | 16 | 17 | ### Environment Variables 18 | Postman allows you to create "Environments" which can have a set of variables. That way, you can use the same Postman request for multiple environments. Variables are referenced using double curly bracket notation (ex: {{token}}) anywhere in the request (URL, params, header, body). 19 | 20 | The collections above assume that you have the following set of variables defined, either for a single environment, or at a global level. You can import the [Template Environment](./Template.postman_environment.json) and fill out the values of the following environment variables. 
21 | 22 | * **token** : The full token obtained from your UAA service 23 | * **tenant_id** : The Predix Zone ID of your Analytics Framework instance (obtained from the VCAP environment in your Analytics Framework service instance) 24 | * **catalog_uri** : The hostname of the instance of Analytics Catalog (obtained from the VCAP environment in your Analytics Framework service instance) 25 | * **config_uri** : The hostname of the instance of Orchestration Configuration (obtained from the VCAP environment in your Analytics Framework service instance) 26 | * **execution_uri** : The hostname of the instance of Orchestration Execution (obtained from the VCAP environment in your Analytics Framework service instance) 27 | * **scheduler_uri** : The hostname of the instance of Analytics Scheduler Service (obtained from the VCAP environment in your Analytics Framework service instance) 28 | * **monitoring_uri** : The hostname of the instance of Orchestration Monitoring Service (obtained from the VCAP environment in your Analytics Framework service instance) 29 | 30 | 31 | To manage your environment, use the drop down menu to the left of the circled "x" in the top-right corner of the window and select "Manage Environment" like below: 32 | 33 | ![Postman Environments Dropdown](./images/PostmanEnvironmentsDropdown.png) 34 | 35 | This will bring up a popup from which you add/import environments and edit environment variables (including global variables): 36 | 37 | ![Postman Environments Popup](./images/PostmanEnvironmentsPopup.png) 38 | 39 | 40 | ### Archived Collections 41 | The Analytics Catalog and Runtime services are deprecated. 
However, if you are still using these services, you can refer to the following archived Postman files: 42 | 43 | * [Analytics Catalog](./archive/Analytics_Catalog.json.postman_collection) 44 | * [Analytics Runtime](./archive/Analytics_Runtime.json.postman_collection) 45 | * [Template Environment](./archive/Template.postman_environment) -------------------------------------------------------------------------------- /postman/Template.postman_environment.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "18bbd39b-4969-83f5-d774-2e5608e60693", 3 | "name": "Template", 4 | "values": [ 5 | { 6 | "enabled": true, 7 | "key": "token", 8 | "value": "", 9 | "type": "text" 10 | }, 11 | { 12 | "enabled": true, 13 | "key": "tenant_id", 14 | "value": "", 15 | "type": "text" 16 | }, 17 | { 18 | "enabled": true, 19 | "key": "catalog_uri", 20 | "value": "", 21 | "type": "text" 22 | }, 23 | { 24 | "enabled": true, 25 | "key": "config_uri", 26 | "value": "", 27 | "type": "text" 28 | }, 29 | { 30 | "enabled": true, 31 | "key": "execution_uri", 32 | "value": "", 33 | "type": "text" 34 | }, 35 | { 36 | "enabled": true, 37 | "key": "scheduler_uri", 38 | "value": "", 39 | "type": "text" 40 | }, 41 | { 42 | "enabled": true, 43 | "key": "monitoring_uri", 44 | "value": "", 45 | "type": "text" 46 | } 47 | ], 48 | "timestamp": 1500508510907, 49 | "_postman_variable_scope": "environment", 50 | "_postman_exported_at": "2017-07-19T23:55:25.000Z", 51 | "_postman_exported_using": "Postman/5.0.2" 52 | } -------------------------------------------------------------------------------- /postman/archive/Template.postman_environment: -------------------------------------------------------------------------------- 1 | { 2 | "id": "acf58668-6045-60a0-5f3c-6e5df4293dfa", 3 | "name": "Template", 4 | "values": [ 5 | { 6 | "key": "token", 7 | "value": null, 8 | "type": "text", 9 | "name": "token", 10 | "enabled": true 11 | }, 12 | { 13 | "key": "catalog_uri", 
14 | "value": null, 15 | "type": "text", 16 | "name": "catalog_uri", 17 | "enabled": true 18 | }, 19 | { 20 | "key": "config_uri", 21 | "value": null, 22 | "type": "text", 23 | "name": "config_uri", 24 | "enabled": true 25 | }, 26 | { 27 | "key": "execution_uri", 28 | "value": null, 29 | "type": "text", 30 | "name": "execution_uri", 31 | "enabled": true 32 | }, 33 | { 34 | "key": "scheduler_uri", 35 | "value": null, 36 | "type": "text", 37 | "name": "scheduler_uri", 38 | "enabled": true 39 | }, 40 | { 41 | "key": "catalog_tenant", 42 | "value": null, 43 | "type": "text", 44 | "name": "catalog_tenant", 45 | "enabled": true 46 | }, 47 | { 48 | "key": "runtime_tenant", 49 | "value": null, 50 | "type": "text", 51 | "name": "runtime_tenant", 52 | "enabled": true 53 | } 54 | ], 55 | "timestamp": 1456446589065, 56 | "synced": false, 57 | "syncedFilename": "" 58 | } -------------------------------------------------------------------------------- /postman/images/PostmanEnvironmentsDropdown.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PredixDev/predix-analytics-sample/2f99ae7335d12c05597041c2734fbdd454be1e57/postman/images/PostmanEnvironmentsDropdown.png -------------------------------------------------------------------------------- /postman/images/PostmanEnvironmentsPopup.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PredixDev/predix-analytics-sample/2f99ae7335d12c05597041c2734fbdd454be1e57/postman/images/PostmanEnvironmentsPopup.png -------------------------------------------------------------------------------- /postman/images/PredixAnalytics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PredixDev/predix-analytics-sample/2f99ae7335d12c05597041c2734fbdd454be1e57/postman/images/PredixAnalytics.png 
-------------------------------------------------------------------------------- /postman/images/postman-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PredixDev/predix-analytics-sample/2f99ae7335d12c05597041c2734fbdd454be1e57/postman/images/postman-logo.png -------------------------------------------------------------------------------- /postman/images/separator.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PredixDev/predix-analytics-sample/2f99ae7335d12c05597041c2734fbdd454be1e57/postman/images/separator.png -------------------------------------------------------------------------------- /user-provisioning/README.md: -------------------------------------------------------------------------------- 1 | # Adding users to UAA to use Analytics UI 2 | 3 | This [analytics-ui-user-add.sh](./analytics-ui-user-add.sh) script can be used to add users with the 4 | proper credentials to your UAA to allow them to use your new Analytics UI instance. It is a Bash shell 5 | script that can be used on all Unix flavors as well as cygwin or GitBash if you're on Windows. 6 | 7 | ## Prerequisites 8 | 1. You must first follow the instructions to set up Analytics UI and its dependent services as per [these instructions](https://docs.predix.io/en-US/content/service/analytics_services/analytic_user_interface/get-started) 9 | 2. You must have a UAAC client installed as this script uses it. [This one](https://github.com/cloudfoundry-community/traveling-cf-admin) is an easy one to install. 10 | 3. You must first be logged in to your Cloud Foundry org and space on the command line by `cf login`. 11 | 12 | This script is provided to the GE community **AS IS** to enable us to create users more easily, in advance of more 13 | user-friendly GUI tools to do the same. As such, there are no guarantees or warranties offered with this script. 
14 | Pull Requests are welcome. Questions may be asked in the 15 | [Issues](../../../issues) to the right on this page. 16 | 17 | ## Usage 18 | 1. Copy this [analytics-ui-user-add.sh](./analytics-ui-user-add.sh) to your system. 19 | 2. Make it executable with `chmod +x analytics-ui-user-add.sh` 20 | 3. Issue `./analytics-ui-user-add.sh` and follow the prompts. 21 | 22 | The script initially collects a lot of information from you, then it does the UAAC work. If you want to abort at 23 | any time, just hit ctrl-c to end the script. 24 | 25 | --------------------------------------------------------------------------------