├── .dockerignore ├── .github └── workflows │ └── erlang.yml ├── .gitignore ├── Dockerfile_debian ├── Dockerfile_debian.intel-nuc ├── Dockerfile_debian_tether ├── LICENSE ├── Makefile ├── README.md ├── apps └── faxe │ ├── include │ ├── faxe.hrl │ └── faxe_epgsql_response.hrl │ ├── priv │ ├── advanced.config │ ├── email_template.html │ ├── erlang_vm.schema │ ├── faxe.schema │ ├── python │ │ ├── azblobstream.py │ │ ├── azblobstream_bytes.py │ │ ├── azblobstreampd.py │ │ ├── decode_dict.py │ │ ├── faxe.py │ │ ├── faxe_handler.py │ │ └── faxe_test.py │ └── rest_routes.config │ ├── src │ ├── components │ │ ├── c_python3.erl │ │ ├── esp_agg_js._erl │ │ ├── esp_aggregate.erl │ │ ├── esp_aggregate_old.erl │ │ ├── esp_amqp_consume.erl │ │ ├── esp_amqp_publish.erl │ │ ├── esp_array_explode.erl │ │ ├── esp_batch.erl │ │ ├── esp_blobstream.erl │ │ ├── esp_case.erl │ │ ├── esp_change_detect.erl │ │ ├── esp_collect.erl │ │ ├── esp_collect_fields.erl │ │ ├── esp_collect_unique.erl │ │ ├── esp_combine.erl │ │ ├── esp_conn_status.erl │ │ ├── esp_count_change.erl │ │ ├── esp_crate_out.erl │ │ ├── esp_crate_query.erl │ │ ├── esp_crate_query_cont.erl │ │ ├── esp_deadman.erl │ │ ├── esp_debug.erl │ │ ├── esp_default.erl │ │ ├── esp_delete.erl │ │ ├── esp_email.erl │ │ ├── esp_eval.erl │ │ ├── esp_extract_path.erl │ │ ├── esp_fields_to_array.erl │ │ ├── esp_group_by.erl │ │ ├── esp_group_union.erl │ │ ├── esp_http_get.erl │ │ ├── esp_http_listen.erl │ │ ├── esp_http_post.erl │ │ ├── esp_http_post_crate.erl │ │ ├── esp_if.erl │ │ ├── esp_influx_out.erl │ │ ├── esp_influx_query._erl │ │ ├── esp_join.erl │ │ ├── esp_join2.erl │ │ ├── esp_json_emitter.erl │ │ ├── esp_jsonsize.erl │ │ ├── esp_keep.erl │ │ ├── esp_log.erl │ │ ├── esp_mem.erl │ │ ├── esp_metrics.erl │ │ ├── esp_modbus.erl │ │ ├── esp_modbus_read.erl │ │ ├── esp_mongo_query.erl │ │ ├── esp_mqtt_publish.erl │ │ ├── esp_mqtt_subscribe.erl │ │ ├── esp_multi_map.erl │ │ ├── esp_oracle_query.erl │ │ ├── esp_parser.erl │ │ ├── 
esp_path_split.erl │ │ ├── esp_percentile.erl │ │ ├── esp_postgre_out.erl │ │ ├── esp_postgre_statement.erl │ │ ├── esp_rename.erl │ │ ├── esp_s7read.erl │ │ ├── esp_sample.erl │ │ ├── esp_set.erl │ │ ├── esp_shift.erl │ │ ├── esp_state_change.erl │ │ ├── esp_state_change_bulk.erl │ │ ├── esp_state_count.erl │ │ ├── esp_state_duration.erl │ │ ├── esp_state_sequence.erl │ │ ├── esp_statistics.erl │ │ ├── esp_stats.erl │ │ ├── esp_tcp_recv.erl │ │ ├── esp_tcp_recv_line.erl │ │ ├── esp_tcp_send.erl │ │ ├── esp_tcp_serve.erl │ │ ├── esp_tcppoll.erl │ │ ├── esp_time_diff.erl │ │ ├── esp_time_elapsed.erl │ │ ├── esp_triggered_timeout.erl │ │ ├── esp_udp_recv.erl │ │ ├── esp_udp_send.erl │ │ ├── esp_unbatch.erl │ │ ├── esp_union.erl │ │ ├── esp_value_diff.erl │ │ ├── esp_value_emitter.erl │ │ ├── esp_where.erl │ │ ├── esp_win_clock.erl │ │ ├── esp_win_event.erl │ │ ├── esp_win_session.erl │ │ ├── esp_win_time.erl │ │ ├── stats │ │ │ ├── c_agg.erl │ │ │ ├── c_noop.erl │ │ │ ├── esp_avg.erl │ │ │ ├── esp_bottom.erl │ │ │ ├── esp_count.erl │ │ │ ├── esp_count_change._erl │ │ │ ├── esp_count_distinct.erl │ │ │ ├── esp_distinct.erl │ │ │ ├── esp_elapsed.erl │ │ │ ├── esp_first.erl │ │ │ ├── esp_geometric_mean.erl │ │ │ ├── esp_kurtosis.erl │ │ │ ├── esp_last.erl │ │ │ ├── esp_max.erl │ │ │ ├── esp_mean.erl │ │ │ ├── esp_median.erl │ │ │ ├── esp_min.erl │ │ │ ├── esp_percentile._erl │ │ │ ├── esp_range.erl │ │ │ ├── esp_skew.erl │ │ │ ├── esp_stats_difference.erl │ │ │ ├── esp_stats_dist_count.erl │ │ │ ├── esp_stddev.erl │ │ │ ├── esp_sum.erl │ │ │ ├── esp_top.erl │ │ │ └── esp_variance.erl │ │ └── win_util.erl │ ├── emqtt_changes._erl │ ├── event_handler_starter.erl │ ├── faxe.app.src │ ├── faxe.erl │ ├── faxe_app.erl │ ├── faxe_config.erl │ ├── faxe_db.erl │ ├── faxe_dfs.erl │ ├── faxe_ets.erl │ ├── faxe_event_guard.erl │ ├── faxe_event_guard_sup.erl │ ├── faxe_event_handlers.erl │ ├── faxe_event_sup.erl │ ├── faxe_flow_observer.erl │ ├── faxe_metrics_sup.erl │ ├── 
faxe_migration.erl │ ├── faxe_sup.erl │ ├── flow │ │ ├── dataflow_events.erl │ │ ├── dataflow_sup.erl │ │ ├── df_graph.erl │ │ ├── df_subscription.erl │ │ ├── flow_deleter.erl │ │ ├── graph_builder.erl │ │ ├── graph_starter.erl │ │ ├── graph_sup.erl │ │ ├── initial_task_starter.erl │ │ └── lock._erl │ ├── flow_post_mortem.erl │ ├── lib │ │ ├── amqp_options.erl │ │ ├── bcd.erl │ │ ├── bunny_esq_worker.erl │ │ ├── bunny_worker.erl │ │ ├── conn_status_handler_dataflow.erl │ │ ├── conn_status_handler_mqtt.erl │ │ ├── conn_status_handler_observer.erl │ │ ├── crate_ignore_rules.erl │ │ ├── debug_handler.erl │ │ ├── debug_handler_mqtt.erl │ │ ├── dfs_debug_handler.erl │ │ ├── email_address.erl │ │ ├── event_handler_mqtt.erl │ │ ├── faxe_epgsql_codec.erl │ │ ├── faxe_epgsql_response.erl │ │ ├── faxe_epgsql_stmt.erl │ │ ├── faxe_python_stats.erl │ │ ├── faxe_s7_stats.erl │ │ ├── faxe_seq_check.erl │ │ ├── faxe_seq_check_manager.erl │ │ ├── faxe_stats.erl │ │ ├── faxe_tcp_server.erl │ │ ├── faxe_time_offset_monitor.erl │ │ ├── faxe_vmstats.erl │ │ ├── flow_changed_handler_mqtt.erl │ │ ├── graph_node_registry.erl │ │ ├── http_lib.erl │ │ ├── http_listen_handler.erl │ │ ├── http_manager.erl │ │ ├── influx_line.erl │ │ ├── lager_emit_backend.erl │ │ ├── lager_flowlog_backend.erl │ │ ├── lager_observer_backend.erl │ │ ├── metrics_handler_dataflow.erl │ │ ├── metrics_handler_mqtt.erl │ │ ├── modbus_reader.erl │ │ ├── mqtt_options.erl │ │ ├── mqtt_pub_pool_handler.erl │ │ ├── mqtt_pub_pool_manager.erl │ │ ├── mqtt_pub_pool_sup.erl │ │ ├── mqtt_publisher.erl │ │ ├── node_metrics.erl │ │ ├── process_stats.erl │ │ ├── queue_cleaner.erl │ │ ├── s7_utils.erl │ │ ├── s7pool_con_handler.erl │ │ ├── s7pool_handler.erl │ │ ├── s7pool_manager.erl │ │ ├── s7pool_sup.erl │ │ ├── s7reader.erl │ │ ├── s7reader_sup.erl │ │ ├── s7worker.erl │ │ ├── sec_to_human.erl │ │ └── state_change.erl │ ├── perf.erl │ └── web │ │ ├── cmw_headers.erl │ │ ├── cowboy_rest_skeleton.erl │ │ ├── 
rest_audit_server.erl │ │ ├── rest_helper.erl │ │ ├── rest_misc_handler.erl │ │ ├── rest_stats_handler.erl │ │ ├── rest_tags_handler.erl │ │ ├── rest_task_handler.erl │ │ ├── rest_tasks_handler.erl │ │ ├── rest_template_handler.erl │ │ ├── rest_templates_handler.erl │ │ ├── rest_user_handler.erl │ │ ├── upload_handler.erl │ │ └── ws_debug_handler.erl │ └── test │ ├── component_SUITE.erl │ ├── df_graph_test.erl │ ├── dfs │ ├── batch_test.dfs │ ├── mqtt_amqp_bridge_test.dfs │ ├── script_expr_test.dfs │ └── unknown_options_test.dfs │ ├── dfs_to_graph.erl │ ├── faxe.spec │ ├── faxe_lambdalib_tests.erl │ ├── faxe_time_tests.erl │ ├── flowdata_tests.erl │ ├── jsn_tests.erl │ ├── lambda_tests.erl │ └── pygments.html ├── config ├── vars.config └── vars │ ├── balena.config │ ├── dev1.config │ ├── dev2.config │ ├── dev3.config │ ├── dev4.config │ ├── dev5.config │ ├── k8s.config │ └── prod.config ├── dfs ├── aggregate │ └── test.dfs ├── array_explode.dfs ├── debug │ └── map_get.dfs ├── dev │ ├── mqtt_pub_pool.dfs │ ├── mqtt_pub_pool2.dfs │ ├── path_split.dfs │ ├── python2.dfs │ ├── python_double.dfs │ └── python_time.dfs ├── other │ ├── alarm2.dfs │ ├── batch_test.dfs │ ├── case_test.dfs │ ├── case_test2.dfs │ ├── case_test3.dfs │ ├── change_detect_test.dfs │ ├── change_detect_timeout_test.dfs │ ├── collect_unique.dfs │ ├── combine.dfs │ ├── combine1.dfs │ ├── combine_merge.dfs │ ├── cond_test1.dfs │ ├── conditional_delete.dfs │ ├── conf_test.dfs │ ├── deadman_2.dfs │ ├── deadman_test.dfs │ ├── deadman_test_repeat.dfs │ ├── delete_test.dfs │ ├── did_you_mean.dfs │ ├── email_param.dfs │ ├── extract.dfs │ ├── grip_calib_images.dfs │ ├── helix_convtrack_crate.dfs │ ├── http.dfs │ ├── https_get_test.dfs │ ├── influx_test.dfs │ ├── join.dfs │ ├── join2.dfs │ ├── jpath.dfs │ ├── json_emitter.dfs │ ├── json_test.dfs │ ├── lambda_list_test.dfs │ ├── lambda_op.dfs │ ├── lambda_test.dfs │ ├── live_data_test.dfs │ ├── macro1.dfs │ ├── mem_test.dfs │ ├── modbus_mqtt_pub.dfs │ ├── 
modbus_multi_test.dfs │ ├── mqtt_lambda_topic_test.dfs │ ├── mqtt_publish.dfs │ ├── mqtt_subscribe.dfs │ ├── node_not_found_err.dfs │ ├── option_check.dfs │ ├── oracle_query_test.dfs │ ├── param_list_test.dfs │ ├── pg_query.dfs │ ├── python1.dfs │ ├── python2.dfs │ ├── python3.dfs │ ├── python4.dfs │ ├── rename.dfs │ ├── rename2.dfs │ ├── s7_read_test1.dfs │ ├── s7_read_test2.dfs │ ├── s7_test.dfs │ ├── sample_test.dfs │ ├── set_test.dfs │ ├── shift._dfs │ ├── shift.dfs │ ├── shift2.dfs │ ├── state_change_bulk_test.dfs │ ├── state_count_test.dfs │ ├── state_duration_test.dfs │ ├── state_duration_test_new.dfs │ ├── state_seq_test.dfs │ ├── statistics.dfs │ ├── stats_test.dfs │ ├── stats_test_fail.dfs │ ├── tcp_conveyor_track.dfs │ ├── tcp_line.dfs │ ├── tcp_line_robot_plc.dfs │ ├── tcp_robot_plc.dfs │ ├── tcp_window.dfs │ ├── template_test.dfs │ ├── test_lm_conveyor_tracking.dfs │ ├── test_lm_robot_plc.dfs │ ├── timeout_test1.dfs │ ├── union_test.dfs │ ├── value_diff_test.dfs │ ├── where.dfs │ ├── win_clock_test.dfs │ ├── win_event_test.dfs │ ├── win_event_test2.dfs │ ├── win_time_test.dfs │ ├── win_time_test_aligned.dfs │ └── window.dfs └── test │ ├── collect.dfs │ ├── count_change.dfs │ ├── delete.dfs │ ├── env.dfs │ ├── faxe_http.dfs │ ├── group_by.dfs │ ├── group_by2.dfs │ ├── http_get.dfs │ ├── http_listen.dfs │ ├── http_post.dfs │ ├── if.dfs │ ├── mongo.dfs │ ├── mqtt_topic_field_test.dfs │ ├── picking_tracking.dfs │ ├── rebatch_test.dfs │ ├── rename.dfs │ ├── time_align.dfs │ ├── value_emitter.dfs │ └── win_session.dfs ├── docker.run ├── ext.config ├── python ├── callback.py ├── data_demo.py ├── double.py ├── filter.py ├── less_than.py ├── path.py └── python_time.py ├── python_requirements.txt ├── python_requirements_prod.txt ├── rebar.config ├── rebar.config.script ├── renovate.json └── todo /.dockerignore: -------------------------------------------------------------------------------- 1 | .rebar3 2 | _* 3 | .eunit 4 | *.beam 5 | *.plt 6 | *.swp 7 | *.swo 8 
| .erlang.cookie 9 | ebin 10 | log 11 | erl_crash.dump 12 | .rebar 13 | logs 14 | _build_old/* 15 | .idea 16 | apps/faxe/test/ 17 | dfs/ 18 | python/ 19 | /apps/faxe/doc/ 20 | /apps/faxe/src/lib/parser/data.erl 21 | /apps/faxe/src/lib/parser/robot_plc.json 22 | /apps/faxe/priv_old/ 23 | 24 | -------------------------------------------------------------------------------- /.github/workflows/erlang.yml: -------------------------------------------------------------------------------- 1 | name: Erlang CI 2 | 3 | on: 4 | push: 5 | branches: [ dev ] 6 | pull_request: 7 | branches: [ dev ] 8 | 9 | jobs: 10 | 11 | build: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | env: 16 | BUILD_WITHOUT_QUIC: true 17 | container: 18 | image: erlang:27.3.2 19 | steps: 20 | - uses: actions/checkout@v2 21 | - name: Compile 22 | run: git config --global url."https://".insteadOf git:// && rebar3 compile 23 | - name: Run tests 24 | run: rebar3 do eunit 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .rebar3 2 | _* 3 | .eunit 4 | *.beam 5 | *.plt 6 | *.swp 7 | *.swo 8 | .erlang.cookie 9 | ebin 10 | log 11 | erl_crash.dump 12 | .rebar 13 | logs 14 | _build_old/ 15 | .idea 16 | /apps/faxe/doc/ 17 | /apps/faxe/src/lib/parser/data.erl 18 | /apps/faxe/src/lib/parser/robot_plc.json 19 | /apps/faxe/priv_old/ 20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019, Alex Minichmair . 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 
10 | 11 | * Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | * The names of its contributors may not be used to endorse or promote 16 | products derived from this software without specific prior written 17 | permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | faxe 2 | ===== 3 | 4 | Flow based data collector/ data processor 5 | Faxe is used to collect data from various sources, process and route this data and also store data to various DBs. 6 | 7 | 8 | ### Read the documentation [here](https://heyoka.github.io/faxe-docs/site). 9 | 10 | ### HTTP API documentation is [here](https://heyoka.github.io/faxe-docs/site/faxe_rest_api.html). 11 | 12 | 13 | Status 14 | ------ 15 | 16 | Latest tag/release is considered stable and is used in production. 
17 | 18 | Build 19 | ----- 20 | 21 | faxe is a rebar3 release 22 | 23 | $ rebar3 compile 24 | 25 | 26 | Erlang 27 | ------ 28 | >= 25 29 | -------------------------------------------------------------------------------- /apps/faxe/include/faxe_epgsql_response.hrl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2021, 4 | %%% @doc 5 | %%% 6 | %%% @end 7 | %%% Created : 07. Nov 2021 18:55 8 | %%%------------------------------------------------------------------- 9 | -author("heyoka"). 10 | 11 | -record(faxe_epgsql_response, { 12 | time_field :: undefined | binary(), 13 | response_type = batch :: atom(), 14 | point_root_object :: undefined | binary(), 15 | default_timestamp :: non_neg_integer(), 16 | field_names_validated = false :: true|false 17 | }). 18 | 19 | -type faxe_epgsql_response() :: #faxe_epgsql_response{}. 20 | -export_type([faxe_epgsql_response/0]). -------------------------------------------------------------------------------- /apps/faxe/priv/advanced.config: -------------------------------------------------------------------------------- 1 | [{lager, [ 2 | {colored, true}, 3 | {error_logger_redirect, true} 4 | ]}, 5 | {kernel, 6 | [ 7 | {shell_history, enabled}, 8 | {shell_history_path, ".rebar3"} 9 | ] 10 | 11 | }, 12 | {sasl, [{utc_log, true}]} 13 | ]. 
-------------------------------------------------------------------------------- /apps/faxe/priv/python/faxe_handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | import psutil 3 | import faxe 4 | 5 | from erlport.erlterms import Atom 6 | from erlport.erlang import set_message_handler 7 | 8 | callback_object = None 9 | 10 | 11 | def register_handler(_classname): 12 | 13 | def handler(args=None): 14 | tag = args[0] 15 | args = args[1:] 16 | global callback_object 17 | if tag == b'init': 18 | class_name = args[0].decode('utf-8') 19 | module_name = class_name.lower() 20 | module = __import__(module_name) 21 | class_ = getattr(module, class_name) 22 | callback_object = class_(args[1]) 23 | elif tag == b'point': 24 | callback_object.point(args[0]) 25 | elif tag == b'batch': 26 | data = json.loads(args[0], object_hook=undefined_to_None) 27 | callback_object.batch(data) 28 | else: 29 | print('no route for handler with', tag) 30 | 31 | set_message_handler(handler) 32 | return Atom(b'ok') 33 | 34 | 35 | def py_stats(ppids): 36 | pids = dict.keys(ppids) 37 | out = {b'mem_total': 0, b'cpu_total': 0, b'proc_list': list()} 38 | for p in psutil.process_iter(['pid', 'memory_info', 'cpu_percent']): 39 | if p.info['pid'] in pids: 40 | pid = p.info['pid'] 41 | mem = round(p.info['memory_info'].rss / (1024 * 1024), 2) 42 | cpu = p.info['cpu_percent'] 43 | out[b'mem_total'] += mem 44 | out[b'cpu_total'] += cpu 45 | out[b'proc_list'].append({b'name': faxe.to_bytes(ppids[pid]), 46 | b'pid': pid, 47 | b'mem': mem, 48 | b'cpu_percent': cpu}) 49 | 50 | return (Atom(b'ok'), out) 51 | 52 | 53 | def process_stats(): 54 | return {b'mem': process_mem_usage(), b'cpu_percent': process_cpu_usage()} 55 | 56 | 57 | # MiB of ram residential 58 | def process_mem_usage(): 59 | mem = psutil.Process().memory_info() 60 | return round(mem.rss / (1024 * 1024), 2) 61 | 62 | 63 | def process_cpu_usage(): 64 | p = psutil.Process() 65 | return 
p.cpu_percent(interval=4) 66 | 67 | 68 | 69 | def undefined_to_None(dct): 70 | for k, v in dct.items(): 71 | if v == 'undefined': 72 | dct[k] = None 73 | 74 | return dct 75 | 76 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_case.erl: -------------------------------------------------------------------------------- 1 | %% Date: 17.01.20 - 18:38 2 | %% Ⓒ 2020 heyoka 3 | %% @doc 4 | %% Evaluate a series of lambda expressions in a top down manner 5 | %% the node will output / add the corresponding value of the first lambda expression that evaluates as true 6 | %% if none of the lambda expressions evaluates as true, a default value will be used 7 | %% @end 8 | -module(esp_case). 9 | -author("Alexander Minichmair"). 10 | 11 | %% API 12 | -behavior(df_component). 13 | 14 | -include("faxe.hrl"). 15 | %% API 16 | -export([init/3, process/3, options/0, check_options/0]). 17 | 18 | 19 | -record(state, { 20 | node_id, 21 | lambdas, 22 | values, 23 | as, 24 | default, 25 | json 26 | }). 27 | 28 | options() -> [ 29 | {lambdas, lambda_list}, 30 | {as, string}, %% key 31 | {values, list}, %% list of values 32 | {json, is_set, false}, %% treat 'values' as json strings 33 | {default, any} 34 | ]. 35 | 36 | check_options() -> 37 | [ 38 | {same_length, [lambdas, values]} 39 | ]. 40 | 41 | init(NodeId, _Ins, 42 | #{lambdas := LambdaFuns, as := As, values := Values0, default := Default0, json := Json}) -> 43 | Default = case Json of true -> jiffy:decode(Default0, [return_maps]); false -> Default0 end, 44 | Values = 45 | case Json of 46 | true -> [jiffy:decode(Val, [return_maps]) || Val <- Values0]; 47 | false -> Values0 48 | end, 49 | {ok, all, 50 | #state{lambdas = LambdaFuns, node_id = NodeId, 51 | as = As, values = Values, default = Default, json = Json}}. 
52 | 53 | 54 | process(_In, #data_batch{points = Points} = Batch, 55 | State = #state{lambdas = LambdaFuns, as = As, values = Values, default = Def}) -> 56 | NewPoints = [eval(Point, LambdaFuns, Values, As, Def) || Point <- Points], 57 | {emit, Batch#data_batch{points = NewPoints}, State}; 58 | process(_Inport, #data_point{} = Point, 59 | State = #state{lambdas = LambdaFuns, as = As, values = Values, default = Def}) -> 60 | NewValue = eval(Point, LambdaFuns, Values, As, Def), 61 | {emit, NewValue, State}. 62 | 63 | eval(#data_point{} = P, [], [], As, Default) -> 64 | %% lager:warning("no lambda matched!!"), 65 | flowdata:set_field(P, As, Default); 66 | eval(#data_point{} = Point, [Lambda|Lambdas], [Value|Values], As, Default) -> 67 | case faxe_lambda:execute(Point, Lambda) of 68 | true -> 69 | %% lager:warning("lambda did match!!"), 70 | flowdata:set_field(Point, As, Value); 71 | false -> 72 | %% lager:warning("lambda did not match!!"), 73 | eval(Point, Lambdas, Values, As, Default) 74 | end. -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_conn_status.erl: -------------------------------------------------------------------------------- 1 | %% Date: 16.02.2020 2 | %% Ⓒ 2020 heyoka 3 | %% @doc 4 | %% collect internal connection status events 5 | %% 6 | %% 7 | %% 8 | -module(esp_conn_status). 9 | -author("Alexander Minichmair"). 10 | 11 | -behaviour(df_component). 12 | 13 | -include("faxe.hrl"). 14 | 15 | %% API 16 | -export([init/3, process/3, handle_info/2, options/0]). 17 | 18 | -record(state, { 19 | flow_id, 20 | node_id, 21 | conn_type 22 | }). 23 | 24 | options() -> 25 | [ 26 | {flow, string}, 27 | {node, string, undefined}, 28 | {type, string, undefined} 29 | ]. 
30 | 31 | init({FId, _NId}, _Inputs, #{flow := FlowId, node := NodeId, type := CType}) -> 32 | case FlowId == FId of 33 | true -> lager:error("can not use 'metrics' node within source flow."); 34 | false -> 35 | gen_event:add_sup_handler(conn_status, conn_status_handler_dataflow, 36 | #{parent => self(), flow_id => FlowId, node_id => NodeId}) 37 | end, 38 | State = #state{flow_id = FlowId, node_id = NodeId, conn_type = CType}, 39 | {ok, all, State}. 40 | 41 | process(_, _Item, State=#state{} ) -> 42 | {ok, State}. 43 | 44 | handle_info({_, Point}, State = #state{conn_type = undefined}) -> 45 | {emit, {1, Point}, State}; 46 | handle_info({_, Point}, State = #state{conn_type = Type}) -> 47 | %% lager:notice("got datapoint: ~p :: conn_type: ~p",[Point, flowdata:field(Point, <<"conn_type">>)]), 48 | case flowdata:field(Point, <<"conn_type">>) == Type of 49 | true -> {emit, {1, Point}, State}; 50 | false -> {ok, State} 51 | end; 52 | handle_info(_Request, State) -> 53 | {ok, State}. 54 | 55 | 56 | %%%=================================================================== 57 | %%% Internal functions 58 | %%%=================================================================== 59 | 60 | 61 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_count_change.erl: -------------------------------------------------------------------------------- 1 | %% Date: 05.06.23- 08:02 2 | %% Ⓒ 2023 heyoka 3 | %% 4 | %% @doc 5 | %% 6 | %% @end 7 | -module(esp_count_change). 8 | -author("Alexander Minichmair"). 9 | 10 | %% API 11 | -behavior(df_component). 12 | 13 | -include("faxe.hrl"). 14 | %% API 15 | -export([init/3, process/3, options/0, wants/0, emits/0]). 16 | 17 | 18 | -record(state, { 19 | node_id, 20 | field, 21 | as, 22 | keep_last = true, 23 | last = nil 24 | }). 25 | 26 | options() -> [ 27 | {field, string}, 28 | {as, string, undefined}, 29 | {keep_last, boolean, true} 30 | ]. 
31 | 32 | %%check_options() -> 33 | %% [{one_of, mode, [?MODE_ABS, ?MODE_CP, ?MODE_PC]}]. 34 | 35 | wants() -> batch. 36 | emits() -> point. 37 | 38 | init(NodeId, _Ins, #{field := Field, as := As, keep_last := KeepLast}) -> 39 | LastP = flowdata:set_field(flowdata:new(), Field, nil), 40 | S = #state{node_id = NodeId, field = Field, as = As, keep_last = KeepLast, last = LastP}, 41 | {ok, all, S}. 42 | 43 | process(_Inport, #data_batch{points = Points, start = BatchStart}, State = #state{field = Field, as = As, last = Last}) -> 44 | {FirstPoint, LastPoint, Count} = do_process(Field, Last, Points), 45 | NewLast = 46 | case State#state.keep_last of 47 | true -> LastPoint; 48 | false -> Last 49 | end, 50 | Ts = case BatchStart of undefined -> FirstPoint#data_point.ts; _-> BatchStart end, 51 | OutPoint0 = #data_point{ts = Ts}, 52 | OutPoint = flowdata:set_field(OutPoint0, As, Count), 53 | {emit, OutPoint, State#state{last = NewLast}}. 54 | 55 | do_process(Field, LastP = #data_point{}, Points) -> 56 | F = fun 57 | (P, {FirstPoint, LastPoint, Count}) -> 58 | NewFirst = 59 | case FirstPoint of undefined -> P; _ -> FirstPoint end, 60 | NewCount = 61 | case flowdata:field(P, Field) =:= flowdata:field(LastPoint, Field) of 62 | true -> Count; 63 | _ -> Count+1 64 | end, 65 | {NewFirst, P, NewCount} 66 | end, 67 | lists:foldl(F, {undefined, LastP, 0}, Points). 68 | %%%%%%%%%%%% 69 | -ifdef(TEST). 70 | 71 | -endif. -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_eval.erl: -------------------------------------------------------------------------------- 1 | %% Date: 05.01.17 - 14:11 2 | %% Ⓒ 2017 heyoka 3 | %% @doc 4 | %% Evaluate a series of lambda expressions 5 | %% @end 6 | -module(esp_eval). 7 | -author("Alexander Minichmair"). 8 | 9 | %% API 10 | -behavior(df_component). 11 | 12 | -include("faxe.hrl"). 13 | %% API 14 | -export([init/3, process/3, options/0, check_options/0, eval/4]). 
15 | 16 | 17 | -record(state, { 18 | node_id, 19 | lambdas, 20 | as, 21 | tags 22 | }). 23 | 24 | options() -> [{lambdas, lambda_list}, {as, string_list}, {tags, string_list, []}]. 25 | 26 | check_options() -> 27 | [ 28 | {same_length, [lambdas, as]} 29 | ]. 30 | 31 | init(NodeId, _Ins, #{lambdas := LambdaFuns, as := As, tags := Tags}) -> 32 | {ok, all, #state{lambdas = LambdaFuns, node_id = NodeId, as = As, tags = Tags}}. 33 | 34 | process(_In, #data_batch{points = Points} = Batch, 35 | State = #state{lambdas = LambdaFuns, as = As, tags = Tags}) -> 36 | NewPoints = [eval(Point, LambdaFuns, As, Tags) || Point <- Points], 37 | {emit, Batch#data_batch{points = NewPoints}, State}; 38 | process(_Inport, #data_point{} = Point, State = #state{lambdas = LambdaFun, as = As, tags = Tags}) -> 39 | %% {T, NewValue} = timer:tc(?MODULE, eval, [Point, LambdaFun, As, Tags]), 40 | %% lager:info("eval took: ~p",[T]), 41 | NewValue = eval(Point, LambdaFun, As, Tags), 42 | {emit, NewValue, State}. 43 | 44 | eval(#data_point{} = Point, Lambdas, As, _Tags) -> 45 | {PointResult, _Index} = 46 | lists:foldl( 47 | fun(LFun, {P, Index}) -> 48 | As0 = lists:nth(Index, As), 49 | P0 = faxe_lambda:execute(P, LFun, As0), 50 | %% P1 = 51 | %% case (catch lists:nth(Index, Tags)) of 52 | %% T0 when is_binary(T0) -> 53 | %% Po = flowdata:set_tag(P0, T0, flowdata:field(P0, As0)), 54 | %% flowdata:delete_field(Po, As0); 55 | %% _ -> P0 56 | %% end, 57 | {P0, Index + 1} 58 | 59 | end, 60 | {Point, 1}, 61 | Lambdas 62 | ), 63 | PointResult. 
-------------------------------------------------------------------------------- /apps/faxe/src/components/esp_extract_path.erl: -------------------------------------------------------------------------------- 1 | %% Date: 30.12.16 - 23:01 2 | %% Ⓒ 2019 heyoka 3 | %% 4 | %% searches the given paths in a data-point and emits a new point with the values found 5 | %% if a path is not found the 'default' value is used 6 | %% @todo implement for data_batch records 7 | %% 8 | -module(esp_extract_path). 9 | -author("Alexander Minichmair"). 10 | 11 | -include("faxe.hrl"). 12 | 13 | -behavior(df_component). 14 | %% API 15 | -export([init/3, process/3, shutdown/1, options/0, extract/4, check_options/0]). 16 | 17 | -record(state, { 18 | nodeid, 19 | paths = [], 20 | as = [], 21 | default 22 | }). 23 | 24 | options() -> 25 | [{path, binary_list}, {as, binary_list}, {default, any, 0}]. 26 | 27 | check_options() -> 28 | [{same_length, [path, as]}]. 29 | 30 | init(NodeId, _Inputs, #{path := Paths, as := As, default := Def} = _Args) -> 31 | {ok, all, #state{paths = Paths, as = As, default = Def, nodeid = NodeId}}. 32 | 33 | process(_Inport, P = #data_point{}, State=#state{paths = Paths, as = As, default = Def}) -> 34 | NewVal = extract(P, Paths, As, Def), 35 | {emit, NewVal, State}. 36 | 37 | shutdown(_State) -> 38 | ok. 39 | 40 | extract(Point = #data_point{ts = Ts}, Paths, As, Default) -> 41 | {_Ix, NewPoint} = 42 | lists:foldl( 43 | fun(Pa, {Idx, AccPoint}) -> 44 | NewV = 45 | case flowdata:field(Point, Pa) of 46 | undefined -> Default; 47 | Val -> Val 48 | end, 49 | {Idx+1, flowdata:set_field(AccPoint, lists:nth(Idx, As), NewV)} 50 | end, 51 | {1, #data_point{ts = Ts}}, 52 | Paths), 53 | 54 | NewPoint. 
-------------------------------------------------------------------------------- /apps/faxe/src/components/esp_fields_to_array.erl: -------------------------------------------------------------------------------- 1 | %% Date: 2023-04-06 20:10 2 | %% Ⓒ 2023 heyoka 3 | %% 4 | %% @doc 5 | %% for every given field in 'fields', holds the last value seen for this field 6 | %% output a data-point with all the fields currently in the buffer on every incoming data-item 7 | %% it is possible to set a default value for fields that have not be seen so far 8 | %% timestamp, delivery_tag and all tags of the current incoming data-point will be used in the output point 9 | %% @end 10 | -module(esp_fields_to_array). 11 | -author("Alexander Minichmair"). 12 | 13 | 14 | %% API 15 | -behavior(df_component). 16 | 17 | -include("faxe.hrl"). 18 | %% API 19 | -export([init/3, process/3, options/0 20 | , check_options/0 21 | , wants/0, emits/0]). 22 | 23 | -record(state, { 24 | node_id, 25 | current = #data_point{}, 26 | fields, 27 | key_name, 28 | value_name, 29 | as, 30 | keep 31 | }). 32 | 33 | options() -> [ 34 | {fields, string_list}, 35 | {key_name, string}, 36 | {value_name, string}, 37 | {ts_as, string, undefined}, 38 | {keep, string_list, undefined}, 39 | {as, string} 40 | ]. 41 | 42 | check_options() -> 43 | [ 44 | %% {same_length, [fields, ke]} 45 | ]. 46 | 47 | wants() -> point. 48 | emits() -> point. 49 | 50 | init(NodeId, _Ins, #{fields := Fields, key_name := KeyName, value_name := ValueName, as := As, keep := Keepers}) -> 51 | {ok, all, 52 | #state{node_id = NodeId, fields = Fields, key_name = KeyName, value_name = ValueName, as = As, keep = Keepers}}. 
53 | 54 | process(_Port, #data_point{} = Point, 55 | State = #state{fields = Fields, key_name = KeyName, value_name = ValName, as = As, keep = Keepers}) -> 56 | %% get all fields 57 | AllFields0 = lists:zip(Fields, flowdata:fields(Point, Fields)), 58 | KeepFields = maps:from_list(lists:zip(Keepers, flowdata:fields(Point, Keepers))), 59 | F = fun({FName, Val}) -> 60 | M0 = #{KeyName => binary:replace(FName, <<"*">>, <<".">>), ValName => Val}, 61 | %% flowdata:merge(), 62 | maps:merge(M0, KeepFields) 63 | end, 64 | AllFields = lists:map(F, AllFields0), 65 | Result = flowdata:set_field(Point, As, AllFields), 66 | %% lager:notice("All Fields ~p",[AllFields]), 67 | %% {ok, State}. 68 | {emit, Result, State#state{}}. 69 | 70 | 71 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_group_union.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.04.2021 - 21:41 2 | %% Ⓒ 2020 heyoka 3 | %% 4 | %% Union of multiple grouped streams. 5 | %% 6 | -module(esp_group_union). 7 | -author("Alexander Minichmair"). 8 | 9 | -behavior(df_component). 10 | %% API 11 | -export([init/3, process/3, options/0]). 12 | 13 | 14 | options() -> []. 15 | 16 | init(NodeId, _Inputs, #{}) -> 17 | {ok, all, NodeId}. 18 | 19 | process(_In, Item, NodeId) -> 20 | {emit, Item, NodeId}. -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_if.erl: -------------------------------------------------------------------------------- 1 | %% Date: 27.03.23 - 14:11 2 | %% Ⓒ 2023 heyoka 3 | %% @doc 4 | %% simple if then else node 5 | %% @end 6 | -module(esp_if). 7 | -author("Alexander Minichmair"). 8 | 9 | %% API 10 | -behavior(df_component). 11 | 12 | -include("faxe.hrl"). 13 | %% API 14 | -export([init/3, process/3, options/0, check_options/0]). 
15 | 16 | 17 | -record(state, { 18 | node_id, 19 | if_lambda, 20 | true_expression, 21 | false_expression, 22 | as 23 | }). 24 | 25 | options() -> [ 26 | {test, lambda}, 27 | {then, any}, 28 | {'else', any, undefined}, 29 | {as, string}]. 30 | 31 | check_options() -> 32 | [ 33 | %% {same_length, [lambdas, as]} 34 | ]. 35 | 36 | init(NodeId, _Ins, #{test := IfLambda, as := As, then := True, 'else' := False}) -> 37 | {ok, all, #state{if_lambda = IfLambda, node_id = NodeId, as = As, true_expression = True, false_expression = False}}. 38 | 39 | process(_In, #data_batch{points = Points} = Batch, State = #state{}) -> 40 | NewPoints = [process_point(Point, State) || Point <- Points], 41 | {emit, Batch#data_batch{points = NewPoints}, State}; 42 | process(_Inport, #data_point{} = Point, State = #state{}) -> 43 | NewPoint = process_point(Point, State), 44 | {emit, NewPoint, State}. 45 | 46 | process_point(Point, #state{if_lambda = FunIf, true_expression = True, false_expression = False, as = As}) -> 47 | case faxe_lambda:execute_bool(Point, FunIf) of 48 | true -> eval(True, Point, As); 49 | false -> 50 | case False of 51 | undefined -> Point; 52 | _ -> eval(False, Point, As) 53 | end 54 | end. 55 | 56 | eval(Fun, Point, As) when is_record(Fun, faxe_lambda) -> 57 | faxe_lambda:execute(Point, Fun, As); 58 | eval(Val, Point, As) -> 59 | flowdata:set_field(Point, As, Val). 60 | 61 | 62 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_jsonsize.erl: -------------------------------------------------------------------------------- 1 | %% Date: 30.12.16 - 23:01 2 | %% Ⓒ 2019 heyoka 3 | %% 4 | %% the jsonsize node converts incoming values to json and logs the resulting byte-size 5 | %% emits data without touching it in any way 6 | %% 7 | -module(esp_jsonsize). 8 | -author("Alexander Minichmair"). 9 | 10 | -include("faxe.hrl"). 11 | 12 | -behavior(df_component). 
13 | %% API 14 | -export([init/3, process/3, shutdown/1, options/0]). 15 | 16 | -record(state, { 17 | inject, 18 | fieldname 19 | }). 20 | 21 | 22 | options() -> 23 | [{inject, is_set}, {field, string, <<"jsonsize">>}]. 24 | 25 | init(_NodeId, _Inputs, #{inject := Inject, field := FName}) -> 26 | {ok, all, #state{inject = Inject, fieldname = FName}}. 27 | 28 | process(_Inport, Value, State = #state{inject = Inject, fieldname = FName}) -> 29 | Json = flowdata:to_json(Value), 30 | NewValue = 31 | case Inject of 32 | true -> flowdata:set_field(Value, FName, byte_size(Json)); 33 | false -> lager:notice("[~p] json binary message size: ~p",[?MODULE, byte_size(Json)]), Value 34 | end, 35 | {emit, NewValue, State}. 36 | 37 | shutdown(_State) -> 38 | ok. -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_log.erl: -------------------------------------------------------------------------------- 1 | %% Date: 30.12.16 - 23:01 2 | %% Log everything that comes in to a file, line by line 3 | %% Ⓒ 2019 heyoka 4 | %% 5 | -module(esp_log). 6 | -author("Alexander Minichmair"). 7 | 8 | -include("faxe.hrl"). 9 | 10 | -behavior(df_component). 11 | %% API 12 | -export([init/3, process/3, options/0, check_options/0]). 13 | 14 | -record(state, { 15 | file :: list(), 16 | field :: undefined | list(), 17 | format :: binary() %% <<"json">> | <<"raw">> 18 | }). 19 | 20 | options() -> 21 | [ 22 | {file, string}, 23 | {format, string, <<"json">>}, 24 | {field, string, undefined} 25 | ]. 26 | 27 | check_options() -> 28 | [{not_empty, [file]}]. 29 | 30 | init(_NodeId, _Inputs, #{file := File, format := Format0, field := Field}) -> 31 | ok = filelib:ensure_dir(File), 32 | {ok, F} = file:open(File, [append, delayed_write]), 33 | Format = case Field of undefined -> Format0; _ -> <<"raw">> end, 34 | {ok, all, #state{file = F, field = Field, format = Format}}. 
35 | 36 | process(_In, P, State = #state{file = F, field = Field}) -> 37 | log(P, F, Field), 38 | {emit, P, State}. 39 | %%process(_In, B = #data_batch{points = Ps}, State = #state{file = F, field = Field}) -> 40 | %% [log(P, F, Field) || P <- Ps], 41 | %% {emit, B, State}. 42 | 43 | %% whole datapoint will be written as json 44 | log(Point, File, undefined) -> 45 | Data = flowdata:to_json(Point), 46 | do_log(Data, File); 47 | %% single field value will be written raw 48 | log(Point, File, Field) -> 49 | Data0 = flowdata:field(Point, Field), 50 | Data = 51 | case Data0 of 52 | _D when is_map(Data0) -> jiffy:encode(Data0); 53 | _ -> Data0 54 | end, 55 | do_log(Data, File). 56 | 57 | do_log(undefined, _File) -> 58 | ok; 59 | do_log(Data, File) when is_binary(Data) -> 60 | do_log(binary_to_list(Data), File); 61 | do_log(Data, File) when is_integer(Data) -> 62 | do_log(integer_to_list(Data), File); 63 | do_log(Data, File) when is_float(Data) -> 64 | do_log(float_to_list(Data), File); 65 | do_log(Data, File) when is_list(Data) -> 66 | write(Data, File); 67 | do_log(_Data, _File) -> 68 | lager:notice("data is : ~p",[_Data]), 69 | ok. 70 | 71 | write(Data, File) -> 72 | io:format(File, "~s~n", [Data]). -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_metrics.erl: -------------------------------------------------------------------------------- 1 | %% Date: 16.02.2020 2 | %% Ⓒ 2020 heyoka 3 | %% @doc 4 | %% collect internal flow metrics data_points 5 | %% 6 | %% 7 | %% 8 | -module(esp_metrics). 9 | -author("Alexander Minichmair"). 10 | 11 | -behaviour(df_component). 12 | 13 | -include("faxe.hrl"). 14 | 15 | %% API 16 | -export([init/3, process/3, handle_info/2, options/0]). 17 | 18 | -record(state, { 19 | flow_id, 20 | node_id, 21 | metrics 22 | }). 23 | 24 | options() -> 25 | [ 26 | {flow, string}, 27 | {node, string, undefined}, 28 | {metrics, string_list, undefined} 29 | ]. 
30 | 31 | init({FId, _NId}, _Inputs, #{flow := FlowId, metrics := Metrics, node := NodeId}) -> 32 | case FlowId == FId of 33 | true -> lager:error("can not use 'metrics' node within source flow."); 34 | false -> 35 | gen_event:add_sup_handler(faxe_metrics, metrics_handler_dataflow, 36 | #{parent => self(), flow_id => FlowId, node_id => NodeId, metrics => Metrics}) 37 | end, 38 | State = #state{flow_id = FlowId, metrics = Metrics, node_id = NodeId}, 39 | {ok, all, State}. 40 | 41 | process(_, _Item, State=#state{} ) -> 42 | {ok, State}. 43 | 44 | handle_info({_, Point}, State = #state{}) -> 45 | %% lager:notice("got datapoint: ~p",[Point]), 46 | {emit, {1, Point}, State}; 47 | handle_info(_Request, State) -> 48 | {ok, State}. 49 | 50 | 51 | %%%=================================================================== 52 | %%% Internal functions 53 | %%%=================================================================== 54 | 55 | 56 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_path_split.erl: -------------------------------------------------------------------------------- 1 | %% Date: 14.03.24 - 08:11 2 | %% Ⓒ 2024 heyoka 3 | %% @doc 4 | %% Split a data-point into several data-points by the root path element 5 | %% @end 6 | -module(esp_path_split). 7 | -author("Alexander Minichmair"). 8 | 9 | %% API 10 | -behavior(df_component). 11 | 12 | -include("faxe.hrl"). 13 | %% API 14 | -export([init/3, process/3, options/0]). 15 | 16 | -record(state, { 17 | path_level, 18 | include_level, 19 | include_as 20 | }). 21 | 22 | options() -> [ 23 | {include_name, boolean, true}, 24 | {include_as, string, <<"name">>} 25 | ]. 26 | 27 | init(_NodeId, _Ins, #{include_name := Include, include_as := As}) -> 28 | {ok, all, 29 | #state{include_level = Include, include_as = As}}. 
30 | 31 | process(_In, P = #data_point{}, State = #state{}) -> 32 | do_process(P, State), 33 | {ok, State}; 34 | process(_In, #data_batch{points = Points}, State = #state{}) -> 35 | Fun = fun(Point) -> do_process(Point, State) end, 36 | lists:foreach(Fun, Points), 37 | {ok, State}. 38 | 39 | do_process(P = #data_point{fields = Fields}, #state{include_level = Include, include_as = As}) -> 40 | maps:foreach( 41 | fun 42 | (Key, Val) when is_map(Val) -> 43 | NewPoint0 = P#data_point{fields = Val}, 44 | NewPoint = 45 | case Include of 46 | true -> flowdata:set_field(NewPoint0, As, Key); 47 | false -> NewPoint0 48 | end, 49 | dataflow:emit(NewPoint); 50 | (_, _) -> 51 | ok 52 | end, 53 | Fields). 54 | 55 | -ifdef(TEST). 56 | -include_lib("eunit/include/eunit.hrl"). 57 | 58 | -endif. -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_sample.erl: -------------------------------------------------------------------------------- 1 | %% Date: 05.01.17 - 14:11 2 | %% Ⓒ 2019 heyoka 3 | %% 4 | %% @doc 5 | %% Sample a stream of data based on count or duration. 6 | %% if rate is an integer, every n'th message will be passed on to the next node(s) 7 | %% if rate is a duration literal, the first message that comes in after the timeout will be passed on 8 | %% @end 9 | -module(esp_sample). 10 | -author("Alexander Minichmair"). 11 | 12 | %% API 13 | -behavior(df_component). 14 | 15 | -include("faxe.hrl"). 16 | %% API 17 | -export([init/3, process/3, options/0, handle_info/2, check_options/0]). 18 | 19 | -record(state, { 20 | node_id, 21 | point_count = 1, 22 | rate_count, 23 | rate_interval, 24 | gate_open = false 25 | }). 26 | 27 | options() -> [{rate, any}]. 28 | 29 | check_options() -> 30 | [{func, rate, fun check_rate/1, <<", must be of type 'integer' or 'duration'">>}]. 
31 | 32 | check_rate(Param) when is_integer(Param) -> true; 33 | check_rate(Param) when is_binary(Param) -> faxe_time:is_duration_string(Param); 34 | check_rate(_) -> false. 35 | 36 | init(NodeId, _Ins, #{rate := Rate}) -> 37 | State = #state{node_id = NodeId}, 38 | NewState = 39 | case Rate of 40 | _Int when is_integer(Rate) -> 41 | State#state{rate_count = Rate}; 42 | _Dur when is_binary(Rate) -> 43 | Interval = faxe_time:duration_to_ms(Rate), 44 | State#state{rate_interval = Interval} 45 | end, 46 | start_timer(NewState), 47 | {ok, all, NewState}. 48 | 49 | process(_In, Item, State = #state{rate_interval = undefined, rate_count = Count, point_count = Count}) -> 50 | {emit, Item, State#state{point_count = 1}}; 51 | process(_In, _Item, State = #state{rate_interval = undefined, point_count = Count}) -> 52 | {ok, State#state{point_count = Count+1}}; 53 | 54 | process(_In, Item, State = #state{gate_open = true}) -> 55 | start_timer(State), 56 | {emit, Item, State#state{gate_open = false}}; 57 | process(_In, _Item, State) -> 58 | {ok, State}. 59 | 60 | handle_info(open_gate, State) -> 61 | {ok, State#state{gate_open = true}}; 62 | handle_info(_R, State) -> 63 | {ok, State}. 64 | 65 | start_timer(#state{rate_interval = undefined}) -> 66 | ok; 67 | start_timer(#state{rate_interval = Interval}) -> 68 | erlang:send_after(Interval, self(), open_gate). -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_shift.erl: -------------------------------------------------------------------------------- 1 | %% Date: 05.01.17 - 14:11 2 | %% Ⓒ 2017 heyoka 3 | %% 4 | %% @doc 5 | %% Shift Timestamp of value(s) by unit and interval ie duration 6 | %% @end 7 | -module(esp_shift). 8 | -author("Alexander Minichmair"). 9 | 10 | %% API 11 | -behavior(df_component). 12 | 13 | -include("faxe.hrl"). 14 | %% API 15 | -export([init/3, process/3, options/0]). 16 | 17 | -record(state, { 18 | node_id, 19 | offset 20 | }). 
21 | 22 | options() -> [{offset, duration, <<"-30s">>}]. 23 | 24 | init(NodeId, _Ins, #{offset := Offset}) -> 25 | {ok, all, #state{node_id = NodeId, offset = Offset}}. 26 | 27 | process(_In, #data_batch{points = Points} = Batch, State = #state{offset = Offset}) -> 28 | NewPoints = [execute(Point,Offset) || Point <- Points], 29 | NewBatch = flowdata:set_bounds(Batch#data_batch{points = NewPoints}), 30 | {emit, NewBatch, State} 31 | ; 32 | process(_Inport, #data_point{} = Point, State = #state{offset = Offset}) -> 33 | NewValue = execute(Point,Offset), 34 | {emit, NewValue, State}. 35 | 36 | 37 | -spec execute(#data_point{}, binary()) -> #data_point{}. 38 | execute(#data_point{ts = Ts} = Point, Offset) -> 39 | NewTs = faxe_time:add(Ts, Offset), 40 | flowdata:set_ts(Point, NewTs). -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_state_count.erl: -------------------------------------------------------------------------------- 1 | %% Date: 15.07.2019 - 09:55 2 | %% Ⓒ 2019 heyoka 3 | %% @doc 4 | %% Computes the number of consecutive points in a given state. The state is defined via a lambda expression. 5 | %% For each consecutive point for which the expression evaluates as true, 6 | %% the state count will be incremented. 7 | %% When a point evaluates to false, the state count is reset. 8 | %% 9 | %% The state count will be added as an additional int64 field to each point. 10 | %% If the expression evaluates to false, the value will be -1. 11 | %% If the expression generates an error during evaluation, the point is discarded 12 | %% and does not affect the state count. 13 | %% 14 | -module(esp_state_count). 15 | -author("Alexander Minichmair"). 16 | 17 | %% API 18 | -behavior(df_component). 19 | 20 | -include("faxe.hrl"). 21 | %% API 22 | -export([init/3, process/3, options/0, wants/0, emits/0]). 
23 | 24 | -record(state, { 25 | node_id, 26 | lambda, 27 | as, 28 | last_count = 0, 29 | state_change 30 | 31 | }). 32 | 33 | options() -> [ 34 | {lambda, lambda}, 35 | {as, binary, <<"state_count">>} 36 | ]. 37 | 38 | wants() -> point. 39 | emits() -> point. 40 | 41 | init(_NodeId, _Ins, #{lambda := Lambda, as := As}) -> 42 | StateTrack = state_change:new(Lambda), 43 | {ok, all, #state{lambda = Lambda, as = As, state_change = StateTrack}}. 44 | 45 | process(_In, #data_batch{points = _Points} = _Batch, _State = #state{lambda = _Lambda}) -> 46 | {error, not_implemented}; 47 | process(_Inport, #data_point{} = Point, State = #state{state_change = StateTrack, as = As}) -> 48 | case state_change:process(StateTrack, Point) of 49 | {ok, NewStateTrack} -> 50 | Count = state_change:get_count(NewStateTrack), 51 | NewPoint = flowdata:set_field(Point, As, Count), 52 | {emit, NewPoint, State#state{state_change = NewStateTrack}}; 53 | {error, _Error} -> 54 | {ok, State} 55 | end. 56 | 57 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_state_duration.erl: -------------------------------------------------------------------------------- 1 | %% Date: 15.07.2019 - 09:55 2 | %% Ⓒ 2019 heyoka 3 | %% @doc 4 | %% Computes the duration of a given state. The state is defined via a lambda expression. 5 | %% Timestamps for the duration are taken from the incoming data-point. 6 | %% 7 | %% For each consecutive point for which the lambda expression evaluates as true, 8 | %% the state duration will be incremented by the duration between points. 9 | %% When a point evaluates as false, the state duration is reset. 10 | %% 11 | %% The state duration will be added as an additional field to each point. 12 | %% If the expression evaluates to false, the value will be -1. 13 | %% When the lambda expression generates an error during evaluation, the point is discarded 14 | %% and does not affect the state duration. 
15 | %% 16 | -module(esp_state_duration). 17 | -author("Alexander Minichmair"). 18 | 19 | %% API 20 | -behavior(df_component). 21 | 22 | -include("faxe.hrl"). 23 | 24 | -define(TOTAL_NAME, <<"_total">>). 25 | 26 | %% API 27 | -export([init/3, process/3, options/0, wants/0, emits/0]). 28 | 29 | -record(state, { 30 | node_id, 31 | lambda, 32 | as, 33 | unit, 34 | emit_total = false, 35 | state_change 36 | 37 | }). 38 | 39 | options() -> [ 40 | {lambda, lambda}, 41 | {as, binary, <<"state_duration">>}, 42 | {unit, duration, <<"1s">>}, 43 | {emit_total, is_set, false} 44 | ]. 45 | 46 | wants() -> point. 47 | emits() -> point. 48 | 49 | init(_NodeId, _Ins, #{lambda := Lambda, as := As, unit := Unit, emit_total := EmitTotal}) -> 50 | StateTrack = state_change:new(Lambda), 51 | {ok, all, #state{lambda = Lambda, as = As, unit = Unit, emit_total = EmitTotal, state_change = StateTrack}}. 52 | 53 | process(_In, #data_batch{points = _Points} = _Batch, _State = #state{lambda = _Lambda}) -> 54 | {error, not_implemented}; 55 | process(_Inport, #data_point{} = Point, State = #state{ as = As, state_change = StateTrack}) -> 56 | 57 | case state_change:process(StateTrack, Point) of 58 | {ok, NewStateTrack} -> 59 | Duration = state_change:get_duration(NewStateTrack), 60 | NewPoint = flowdata:set_field(Point, As, Duration), 61 | {emit, NewPoint, State#state{state_change = NewStateTrack}}; 62 | {error, Error} -> 63 | lager:error("Error evaluating lambda: ~p",[Error]), 64 | {ok, State} 65 | end. 66 | 67 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_statistics.erl: -------------------------------------------------------------------------------- 1 | %% Date: 05.01.17 - 14:11 2 | %% Ⓒ 2017 heyoka 3 | -module(esp_statistics). 4 | -author("Alexander Minichmair"). 5 | 6 | -include("faxe.hrl"). 7 | %% API 8 | 9 | -behavior(df_component). 10 | %% API 11 | -export([init/3, process/3, options/0]). 
12 | 13 | -callback execute(tuple(), term()) -> tuple(). 14 | 15 | -record(state, { 16 | node_id, 17 | field, 18 | modules :: list(), 19 | module_state, 20 | as, 21 | mods_as 22 | }). 23 | 24 | options() -> [ 25 | {field, binary}, 26 | {fields, string_list}, 27 | {as, string_list, undefined}, 28 | {modules, string_list} 29 | ]. 30 | 31 | 32 | init(NodeId, _Ins, #{field := Field, as := As, modules := Funcs} = Args) -> 33 | Modules = [binary_to_existing_atom(<<"esp_", M/binary>>, latin1) || M <- Funcs], 34 | As1 = case As of undefined -> Funcs; _ -> As end, 35 | State = #state{field = Field, node_id = NodeId, as = As1, modules = Modules}, 36 | {ok, all, State#state{module_state = Args, mods_as = lists:zip(Modules, As1)}}. 37 | 38 | process(_Inport, #data_batch{} = Batch, State = #state{mods_as = Mods_As, module_state = MState, field = F}) -> 39 | 40 | Ps = prepare(Batch, F), 41 | Points = [call(Ps, Mod, MState, As) || {Mod, As} <- Mods_As], 42 | MPoint = flowdata:merge_points(Points), 43 | {emit, MPoint, State} 44 | . 45 | 46 | %%%%%%%%%%%%%%%%%%%% internal %%%%%%%%%%%% 47 | 48 | %% prepare a data_batch for aggregate execution 49 | prepare(B=#data_batch{}, Field) -> 50 | {flowdata:ts(B), flowdata:field(B, Field)}. 51 | 52 | -spec call(tuple(), atom(), term(), binary()) -> #data_batch{} | #data_point{}. 53 | call({Tss,_Vals}=Data, Module, MState, As) when is_list(Tss) -> 54 | c_agg:call(Data, Module, MState, As). 55 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_time_elapsed.erl: -------------------------------------------------------------------------------- 1 | %% Ⓒ 2019 heyoka 2 | %% 3 | %% @doc 4 | %% The elapsed node adds a field to the current data-item 5 | %% containing the difference between the times of the consecutive items 6 | %% To make it clear, this node will measure the arrival time difference of consecutive data-items. 
7 | %% 8 | %% With the 'as' option, the name of the output field can be changed. 9 | %% 10 | %% 'as' defaults to "elapsed" 11 | %% 12 | %% output values are in milliseconds 13 | %% @end 14 | -module(esp_time_elapsed). 15 | -author("Alexander Minichmair"). 16 | 17 | %% API 18 | -behavior(df_component). 19 | 20 | -include("faxe.hrl"). 21 | %% API 22 | -export([init/3, process/3, options/0]). 23 | 24 | -record(state, { 25 | node_id :: {binary(), binary()}, 26 | as :: binary(), 27 | default :: term(), 28 | last_time :: undefined|faxe_time:timestamp() 29 | }). 30 | 31 | options() -> [ 32 | {as, binary, <<"elapsed">>}, 33 | {default, any, 0} 34 | ]. 35 | 36 | init(NodeId, _Ins, #{as := As, default := Default}) -> 37 | {ok, all, #state{node_id = NodeId, as = As, default = Default}}. 38 | 39 | process(_In, Item, State = #state{last_time = undefined, as = As, default = Def}) -> 40 | {emit, flowdata:set_field(Item, As, Def), State#state{last_time = faxe_time:now()}}; 41 | process(_In, Item, State = #state{as = As, last_time = Last}) -> 42 | Now = faxe_time:now(), 43 | NewItem = flowdata:set_field(Item, As, Now - Last), 44 | {emit, NewItem, State#state{last_time = Now}}. 45 | 46 | %%%%%%%%%%%% 47 | -ifdef(TEST). 48 | -endif. -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_udp_send.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2022 4 | %%% @doc 5 | %%% This node sends incoming data-items as json over udp 6 | %%% @end 7 | %%% Created : Sept 21. 2022 16:56 8 | %%%------------------------------------------------------------------- 9 | -module(esp_udp_send). 10 | -author("heyoka"). 11 | 12 | %% API 13 | -behavior(df_component). 14 | 15 | -include("faxe.hrl"). 16 | %% API 17 | -export([init/3, process/3, options/0, shutdown/1, metrics/0, handle_info/2]). 
18 | 19 | -define(RECONNECT_TIME, 2000). 20 | 21 | -record(state, { 22 | peer, 23 | port, 24 | socket, 25 | as, 26 | fn_id 27 | }). 28 | 29 | -define(SOCKOPTS, [binary, {broadcast, false}]). 30 | 31 | options() -> [ 32 | {host, string}, 33 | {port, integer} 34 | ]. 35 | 36 | metrics() -> 37 | [ 38 | {?METRIC_BYTES_READ, meter, []} 39 | ]. 40 | 41 | init(NodeId, _Ins, #{port := Port, host := Host}) -> 42 | State = #state{port = Port, fn_id = NodeId, peer = binary_to_list(Host)}, 43 | reconnect(0), 44 | {ok, all, State}. 45 | 46 | process(_In, _DataItem, State = #state{socket = undefined}) -> 47 | lager:warning("got data-item but udp socket not setup yet"), 48 | {ok, State}; 49 | process(_In, DataItem, State = #state{socket = Socket, peer = Peer, port = Port}) -> 50 | Data = flowdata:to_json(DataItem), 51 | case catch gen_udp:send(Socket, Peer, Port, Data) of 52 | ok -> ok; 53 | %% format string must consume both arguments, otherwise lager raises a format error 54 | Err -> lager:warning("error when sending udp packet: ~p (~p)",[Err, {Socket, Peer, Port, Data}]) 55 | end, 55 | {ok, State}. 56 | 57 | handle_info(connect, State) -> 58 | connect(State); 59 | handle_info(_, State) -> 60 | {ok, State}. 61 | 62 | shutdown(#state{socket = Sock}) -> 63 | catch (gen_udp:close(Sock)). 64 | 65 | connect(S = #state{port = _Port}) -> 66 | Socket = 67 | case gen_udp:open(0, ?SOCKOPTS) of 68 | {ok, Sock} -> Sock; 69 | {error, _What} -> 70 | reconnect(?RECONNECT_TIME), 71 | undefined 72 | end, 73 | {ok, S#state{socket = Socket}}. 74 | 75 | reconnect(T) -> 76 | erlang:send_after(T, self(), connect). 77 | 78 | 79 | 80 | 81 | 82 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_unbatch.erl: -------------------------------------------------------------------------------- 1 | %% Date: 16.02.2020 2 | %% Ⓒ 2020 heyoka 3 | %% @doc 4 | %% This node splits an incoming data_batch and emits every contained point 5 | %% as a single data_point.
6 | %% Every point contained in an incoming data_batch is emitted separately, 7 | %% so downstream nodes receive a stream of single data_points. 8 | %% Single data_points and one-point batches are passed through directly. 9 | %% 10 | %% 11 | %% 12 | -module(esp_unbatch). 13 | -author("Alexander Minichmair"). 14 | 15 | -behaviour(df_component). 16 | 17 | -include("faxe.hrl"). 18 | 19 | %% API 20 | -export([init/3, process/3, handle_info/2, options/0, wants/0, emits/0, shutdown/1]). 21 | 22 | -record(state, { 23 | 24 | }). 25 | 26 | options() -> 27 | []. 28 | 29 | wants() -> both. 30 | emits() -> point. 31 | 32 | init(_NodeId, _Inputs, #{}) -> 33 | {ok, false, #state{}}. 34 | 35 | 36 | process(_, #data_point{} = Point, State=#state{} ) -> 37 | {emit, Point, State}; 38 | process(_, #data_batch{points = [Point]}, State=#state{} ) -> 39 | {emit, Point, State}; 40 | process(_, #data_batch{points = Points}, State=#state{} ) -> 41 | [dataflow:emit(Point) || Point <- Points], 42 | {ok, State}. 43 | 44 | handle_info(_Request, State) -> 45 | {ok, State}. 46 | 47 | shutdown(#state{}) -> 48 | ok. 49 | 50 | %%%=================================================================== 51 | %%% Internal functions 52 | %%%=================================================================== 53 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_union.erl: -------------------------------------------------------------------------------- 1 | %% Date: 15.02.2020 - 21:41 2 | %% Ⓒ 2020 heyoka 3 | %% 4 | %% Union of multiple streams. 5 | %% The union node takes the union of all of its parents as a simple pass through. 6 | %% Data points received from each parent are passed onto child nodes without modification 7 | %% 8 | -module(esp_union). 9 | -author("Alexander Minichmair"). 10 | 11 | -include("faxe.hrl"). 12 | 13 | -behavior(df_component). 14 | %% API 15 | -export([init/3, process/3, options/0]). 16 | 17 | 18 | options() -> [].
19 | 20 | init(NodeId, _Inputs, #{}) -> 21 | {ok, all, NodeId}. 22 | 23 | process(_In, Item, State) -> 24 | {emit, Item, State}. -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_where.erl: -------------------------------------------------------------------------------- 1 | %% Date: 06.06 - 19:04 2 | %% Ⓒ 2019 heyoka 3 | %% @doc 4 | %% Filter points and batches with a lambda expression, which returns a boolean value; 5 | %% Data-items for which the lambda expression evaluates as false, will be discarded. 6 | %% For boolean value evaluation, @see dfs_std_lib:bool/1 7 | %% 8 | -module(esp_where). 9 | -author("Alexander Minichmair"). 10 | %% API 11 | -behavior(df_component). 12 | -include("faxe.hrl"). 13 | 14 | -export([init/3, process/3, options/0]). 15 | 16 | -record(state, { 17 | node_id :: binary(), 18 | lambda :: undefined|#faxe_lambda{}, 19 | emit_empty = false :: true|false 20 | }). 21 | 22 | options() -> [ 23 | {lambda, lambda}, 24 | {emit_empty, boolean, false} 25 | ]. 26 | 27 | init(_NodeId, _Ins, #{lambda := Lambda, emit_empty := Empty}) -> 28 | {ok, all, #state{lambda = Lambda, emit_empty = Empty}}. 29 | 30 | process(_In, #data_batch{points = Points} = Batch, State = #state{lambda = Lambda}) -> 31 | Res = lists:filter(fun(Point) -> exec(Point, Lambda) end, Points), 32 | maybe_emit(Batch, Res, State); 33 | process(_Inport, #data_point{} = Point, State = #state{lambda = Lambda}) -> 34 | case exec(Point, Lambda) of 35 | true -> {emit, Point, State}; 36 | false -> {ok, State} 37 | end. 38 | 39 | exec(Point, LFun) -> 40 | dfs_std_lib:bool( faxe_lambda:execute(Point, LFun) ). 41 | 42 | maybe_emit(_B, [], State = #state{emit_empty = false}) -> 43 | {ok, State}; 44 | maybe_emit(Batch, NewList, State = #state{}) -> 45 | {emit, Batch#data_batch{points = NewList}, State}. 
46 | 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /apps/faxe/src/components/esp_win_session.erl: -------------------------------------------------------------------------------- 1 | %% Date: 26.09.21 - 19:41 2 | %% Ⓒ 2021 heyoka 3 | %% @doc 4 | %% A session window aggregates records into a session, which represents a period of activity separated by a specified gap of inactivity. 5 | %% Any data_points with timestamps that occur within the inactivity gap of existing sessions will be added to this session. 6 | %% If a data_points's timestamp occurs outside of the session gap, a new session is created. 7 | %% A new session window starts if the last record that arrived is further back in time than the specified inactivity gap. 8 | %% 9 | %% window which refers it's timing to the timestamp contained in the incoming data-items just like the win_time node does 10 | %% but the window boundaries will be determined by an inactivity gap parameter (session_timeout) 11 | %% Session window durations vary ! 12 | %% @todo 13 | %% 14 | -module(esp_win_session). 15 | -author("Alexander Minichmair"). 16 | 17 | -behaviour(df_component). 18 | 19 | -include("faxe.hrl"). 20 | 21 | %% API 22 | -export([init/3, process/3, handle_info/2, options/0, wants/0, emits/0]). 23 | 24 | -record(state, { 25 | window :: queue:queue(), 26 | last_ts = undefined, 27 | inactivity_gap 28 | }). 29 | 30 | options() -> 31 | [ 32 | {session_timeout, duration, <<"30s">>} 33 | ]. 34 | 35 | wants() -> point. 36 | emits() -> batch. 37 | 38 | init(_NodeId, _Inputs, #{session_timeout := STimeout}) -> 39 | GapSize = faxe_time:duration_to_ms(STimeout), 40 | State = #state{window = queue:new(), inactivity_gap = GapSize}, 41 | {ok, all, State}. 
42 | 43 | process(_Inport, Point = #data_point{ts = Ts}, State = #state{window = Win, last_ts = undefined}) -> 44 | {ok, State#state{last_ts = Ts, window = queue:in(Point, Win)}}; 45 | process(_Inport, Point = #data_point{ts = Ts}, State = #state{window = Win, last_ts = LastTs, inactivity_gap = Gap}) 46 | when (Ts-LastTs) =< Gap -> 47 | {ok, State#state{last_ts = Ts, window = queue:in(Point, Win)}}; 48 | process(_Inport, Point = #data_point{ts = Ts}, State = #state{window = Win}) -> 49 | Batch = #data_batch{points = queue:to_list(Win)}, 50 | WinNew = queue:new(), 51 | {emit, Batch, State#state{last_ts = Ts, window = queue:in(Point, WinNew)}}. 52 | 53 | 54 | handle_info(_Request, State) -> 55 | {ok, State}. 56 | 57 | %%%=================================================================== 58 | %%% Internal functions 59 | %%%=================================================================== 60 | 61 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/c_agg.erl: -------------------------------------------------------------------------------- 1 | %% Date: 12.05.17 - 20:50 2 | %% Ⓒ 2017 heyoka 3 | -module(c_agg). 4 | -author("Alexander Minichmair"). 5 | 6 | -include("faxe.hrl"). 7 | 8 | %% API 9 | -export([call/4]). 10 | 11 | 12 | -spec call(tuple(), atom(), term(), binary()) -> #data_batch{} | #data_point{}. 13 | call({[], _}, _Module, _MState, _As) -> 14 | #data_batch{}; 15 | call({Tss,_Vals}=Data, Module, MState, As) when is_list(Tss) -> 16 | result(Module:execute(Data, MState), As, Tss). 17 | 18 | %%-spec result({Ts, V} | [{Ts, V}], binary(), list()) -> #data_point{} | #data_batch{}. 
19 | result({first, Value}, As, Tss) -> 20 | Timestamp = lists:last(Tss), 21 | result({Timestamp, Value}, As, Tss) 22 | ; 23 | result({last, Value}, As, [Timestamp |_R]=Tss) -> 24 | result({Timestamp, Value}, As, Tss) 25 | ; 26 | result({Timestamp, Value}, As, _T) when is_integer(Timestamp) -> 27 | flowdata:set_field(#data_point{ts = Timestamp}, As, Value) 28 | ; 29 | result({Tss, ValueList}, As, _Tss) when is_list(Tss) andalso is_list(ValueList)-> 30 | L = lists:zip(Tss, ValueList), 31 | %% lists:foreach(fun({T, V}) -> 32 | % lager:notice("out: ~p :: ~p",[faxe_time:to_htime(T), V]) end, L), 33 | Points = lists:map(fun({_T, _V}=P) -> result(P, As, _T) end, L), 34 | #data_batch{points = Points}. 35 | 36 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/c_noop.erl: -------------------------------------------------------------------------------- 1 | %% Date: 12.05.17 - 20:58 2 | %% Ⓒ 2017 LineMetrics GmbH 3 | -module(c_noop). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_Tss, _Values} = Data, _S) -> 15 | Data. 16 | 17 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_avg.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_avg). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_Tss, Values}, _S) -> 15 | {first, lists:sum(Values)/length(Values)}. 16 | 17 | -ifdef(TEST). 18 | %% basic_test() -> ?assertEqual(16.6, execute([1,3,8,16,55], #{field => <<"val">>})). 19 | -endif. 
20 | 21 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_bottom.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_bottom). 4 | -author("Alexander Minichmair"). 5 | 6 | -include("faxe.hrl"). 7 | 8 | 9 | -behavior(esp_stats). 10 | %% API 11 | -export([execute/2, options/0]). 12 | 13 | options() -> 14 | esp_stats:get_options() ++ [{num, integer, 1},{module, atom, ?MODULE}]. 15 | 16 | execute({Tss, Values}, #{num := Num}) when is_list(Values) -> 17 | lager:debug("execute with: ~p",[Values]), 18 | New = lists:zip(Tss, Values), 19 | Sorted = lists:usort( fun({_Ts, V1}, {_Ts2, V2}) -> V2 =< V1 end, New), 20 | Len = length(Sorted), 21 | RLen = case Len >= Num of true -> Len-Num; false -> 0 end, 22 | lists:unzip(lists:reverse(lists:nthtail(RLen, Sorted))). 23 | 24 | 25 | -ifdef(TEST). 26 | 27 | basic_test() -> 28 | ?assertEqual( 29 | {[8,5,2,4,6],[319,322,326,328,331]}, 30 | esp_bottom:execute({[1,2,3,4,5,6,7,8,9],[399,326,354,328,322,331,388,319,377]},#{num=>5}) 31 | ), 32 | ?assertEqual( 33 | {[8,5,2,4,6,3,9,7,1],[319,322,326,328,331,354,377,388,399]}, 34 | esp_bottom:execute({[1,2,3,4,5,6,7,8,9],[399,326,354,328,322,331,388,319,377]},#{num=>65}) 35 | ). 36 | -endif. -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_count.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_count). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | 15 | execute({Tss, _Values}, _Opts) when is_list(Tss) -> 16 | {first, length(Tss)}. 
17 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_count_change._erl: -------------------------------------------------------------------------------- 1 | %% Date: 23.03.21 - 18:02 2 | %% Ⓒ 2021 heyoka 3 | %% counts value changes 4 | -module(esp_count_change). 5 | -author("Alexander Minichmair"). 6 | 7 | 8 | -behavior(esp_stats). 9 | 10 | -include("faxe.hrl"). 11 | %% API 12 | -export([execute/2, options/0]). 13 | 14 | options() -> 15 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 16 | 17 | %% equality of values is: =:= (type and value) 18 | execute({Tss, Values}, _Opts) when is_list(Tss) -> 19 | F = fun 20 | (Last, {Last, Count}) -> {Last, Count}; 21 | (E, {_Last, Count}) -> {E, Count+1} 22 | end, 23 | {_L, Counter} = lists:foldl(F, {nil, 0}, Values), 24 | {first, Counter}. 25 | 26 | -ifdef(TEST). 27 | basic_test() -> 28 | ?assertEqual({first, 4}, execute({[1, 2, 3, 4, 5, 6], [1, 1, 1, 2, 1, 6]}, #{})). 29 | basic_equality_test() -> 30 | ?assertEqual({first, 6}, execute({[1, 2, 3, 4, 5, 6], [1, 1.0, 1, 2, 1, 6]}, #{})). 31 | basic_2_test() -> 32 | ?assertEqual({first, 5}, execute({[1, 2, 3, 4, 5, 6, 7], 33 | [<<"1">>, <<"2">>, <<"2">>, <<"a">>, <<"b">>, <<"b">>, <<"1">>]}, #{})). 34 | 35 | -endif. 36 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_count_distinct.erl: -------------------------------------------------------------------------------- 1 | %% Date: 23.03.21 - 18:02 2 | %% Ⓒ 2021 heyoka 3 | -module(esp_count_distinct). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | 9 | -include("faxe.hrl"). 10 | %% API 11 | -export([execute/2, options/0]). 12 | 13 | options() -> 14 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 15 | 16 | 17 | execute({Tss, Values}, _Opts) when is_list(Tss) -> 18 | Set = sets:from_list(Values, [{version, 2}]), 19 | {first, sets:size(Set)}. 
20 | 21 | -ifdef(TEST). 22 | basic_test() -> 23 | ?assertEqual({first, 4}, execute({[1,2,3,4,5], [1,2,1,2,4,6]}, #{})). 24 | basic_2_test() -> 25 | ?assertEqual({first, 4}, execute({[1,2,3,4,5], [<<"1">>,<<"2">>,<<"1">>,<<"a">>,<<"b">>,<<"2">>]}, #{})). 26 | -endif. 27 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_distinct.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_distinct). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_Tss, Values}, _Opts) when is_list(Values) -> 15 | Set = sets:from_list(Values), 16 | {first, sets:to_list(Set)}. -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_elapsed.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_elapsed). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({Tss, _Values}, _Opts) -> 15 | calc(Tss, []). 16 | 17 | calc([], Acc) -> 18 | Acc; 19 | calc([_Ts], Acc) -> 20 | Acc; 21 | calc([H|T], Acc) -> 22 | [Sec | _] = T, 23 | calc(T, [{Sec, abs(H - Sec)}|Acc]). 24 | 25 | -ifdef(TEST). 26 | %% basic_test() -> ?assertEqual([2,5,8,39], execute({[1,3,8,16,55], []}, #{})). 27 | -endif. 
-------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_first.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_first). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_Tss, Values}, _Opts) -> 15 | {first, lists:last(Values)}. -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_geometric_mean.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_geometric_mean). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_Tss, Values}, _Opts) when is_list(Values) -> 15 | Length = length(Values), 16 | [First|Values1] = Values, 17 | Prod = lists:foldl( 18 | fun(E, Acc) -> E * Acc end, 19 | First, Values1 20 | ), 21 | {first, mathex:nth_root(Length, Prod)}. 22 | 23 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_kurtosis.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_kurtosis). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | execute({_Tss, Values}, _Opts) -> 14 | lager:debug("execute with: ~p",[Values]), 15 | {first, mathex:kurtosis(Values)}. 
16 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_last.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_last). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({[Ts| _Tss], [Val| _Values]}, _Opts) -> 15 | {Ts, Val}. 16 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_max.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_max). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_T,Values}, _O) -> 15 | lager:debug("execute with: ~p",[Values]), 16 | {first, lists:max(Values)}. 17 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_mean.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_mean). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_T, Vals}, _Opts) -> 15 | {first, lists:sum(Vals)/length(Vals)}. 16 | 17 | -ifdef(TEST). 18 | %% basic_test() -> ?assertEqual(16.6, execute([1,3,8,16,55], #{field => <<"val">>})). 19 | -endif. 
20 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_median.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_median). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_Tss, Values}, _Opts) -> 15 | Length = length(Values), 16 | Sorted = lists:sort(Values), 17 | M = 18 | case (Length rem 2) == 0 of 19 | false -> 20 | lists:nth(trunc((Length+1)/2), Sorted); 21 | true -> 22 | Nth = trunc(Length/2), 23 | (lists:nth(Nth, Sorted) + lists:nth(Nth+1, Sorted)) / 2 24 | end, 25 | {first, M}. 26 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_min.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_min). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_T, Values}, _Opts) -> 15 | lager:debug("execute with: ~p",[Values]), 16 | {first, lists:min(Values)}. 17 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_percentile._erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_percentile). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{perc, integer, 95}, {module, atom, ?MODULE}]. 
13 | 14 | execute({_T, Values}, #{perc := Perc}) -> 15 | lager:debug("execute with: ~p",[Values]), 16 | {first, mathex:percentile(Values, Perc)}. 17 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_range.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_range). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_T, Values}, _Opts) -> 15 | lager:debug("execute with: ~p",[Values]), 16 | {first, abs(lists:max(Values) - lists:min(Values))}. 17 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_skew.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_skew). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_T, Values}, _Opts) -> 15 | lager:debug("execute with: ~p",[Values]), 16 | {first, mathex:skew(Values)}. 17 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_stats_difference.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_stats_difference). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 
13 | 14 | execute({Tss, Values}, _Opts) -> 15 | Res = calc(lists:reverse(Values), lists:reverse(Tss), []), 16 | lists:unzip(Res). 17 | 18 | calc([], [], Acc) -> 19 | Acc; 20 | calc([_Val], [_Ts], Acc) -> 21 | Acc; 22 | calc([H| TVal], [_HTs|TTs], Acc) -> 23 | [Sec | _] = TVal, 24 | [SecTs| _] = TTs, 25 | calc(TVal, TTs, [{SecTs, abs(H - Sec)}|Acc]). 26 | 27 | -ifdef(TEST). 28 | %% basic_test() -> ?assertEqual([2,5,8,39], execute([1,3,8,16,55])). 29 | -endif. -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_stddev.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_stddev). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_Tss, Values}, _Opts) -> 15 | lager:debug("execute with: ~p",[Values]), 16 | {first, mathex:stdev_sample(Values)}. 17 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_sum.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_sum). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | -behavior(esp_stats). 8 | %% API 9 | -export([execute/2, options/0]). 10 | 11 | options() -> 12 | esp_stats:get_options() ++ [{module, atom, ?MODULE}]. 13 | 14 | execute({_Tss, Values}, _Opts) -> 15 | lager:debug("execute with: ~p",[Values]), 16 | {first, lists:sum(Values)}. 17 | 18 | 19 | -ifdef(TEST). 20 | 21 | %%basic_test() -> 22 | %% ?assertEqual(22, execute([10,2,2,2,6])). 23 | 24 | -endif. 
-------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_top.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_top). 4 | -author("Alexander Minichmair"). 5 | 6 | -include("faxe.hrl"). 7 | 8 | 9 | -behavior(esp_stats). 10 | %% API 11 | -export([execute/2, options/0]). 12 | 13 | options() -> 14 | esp_stats:get_options() ++ [{num, integer, 1}, {module, atom, ?MODULE}]. 15 | 16 | %%% @todo look for mixed up timestamps after sorting the value-list !! 17 | execute({Tss, Values}, #{num := Num}) when is_list(Values) -> 18 | lager:debug("execute with: ~p",[Values]), 19 | New = lists:zip(Tss, Values), 20 | Sorted = lists:usort( fun({_Ts, V1}, {_Ts2, V2}) -> V1 =< V2 end, New), 21 | Len = length(Sorted), 22 | RLen = case Len >= Num of true -> Len-Num; false -> 0 end, 23 | lists:unzip(lists:reverse(lists:nthtail(RLen, Sorted))). 24 | 25 | 26 | -ifdef(TEST). 27 | basic_test() -> ?assertEqual( 28 | {[1,7,9,3],[399,388,377,354]}, 29 | execute({[1,2,3,4,5,6,7,8,9],[399,326,354,328,322,331,388,319,377]},#{num=>4}) 30 | ). 31 | -endif. 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /apps/faxe/src/components/stats/esp_variance.erl: -------------------------------------------------------------------------------- 1 | %% Date: 09.12.16 - 18:02 2 | %% Ⓒ 2016 heyoka 3 | -module(esp_variance). 4 | -author("Alexander Minichmair"). 5 | 6 | 7 | 8 | -behavior(esp_stats). 9 | %% API 10 | -export([execute/2, options/0]). 11 | 12 | options() -> 13 | esp_stats:get_options() ++ [{num, integer, 1}, {module, atom, ?MODULE}]. 14 | 15 | execute({_Tss, Values}, _Opts) -> 16 | lager:debug("execute with: ~p",[Values]), 17 | {first, mathex:variance(Values)}. 
18 | -------------------------------------------------------------------------------- /apps/faxe/src/components/win_util.erl: -------------------------------------------------------------------------------- 1 | %% Date: 06.01.17 - 19:29 2 | %% Ⓒ 2017 heyoka 3 | %% 4 | -module(win_util). 5 | -author("Alexander Minichmair"). 6 | 7 | %% API 8 | -export([sync/2, split/2, sync_q/2]). 9 | 10 | %% @doc 11 | %% head-drop as many entries in the first list, as there are in the second 12 | %% return the first, maybe shortened list 13 | %% @end 14 | -spec sync(list(), list()) -> list(). 15 | sync(List, []) -> 16 | List; 17 | sync([_LH|L], [_E|R]) -> 18 | sync(L, R). 19 | 20 | sync_q(Queue, []) -> 21 | Queue; 22 | sync_q(Q, [_E|R]) -> 23 | sync_q(queue:drop(Q), R). 24 | 25 | %% @doc 26 | %% provide a list of timestamps and a reference timestamp 27 | %% -> and get back a list of timestamps, which are older than the reference 28 | %% and a list of Timestamps which a are younger or equally old 29 | -spec split(list(), non_neg_integer()) -> {Keep :: list(), Evict :: list()}. 30 | split(List, Pred) -> 31 | %% lager:notice("Pred in split_ea : ~p ", [Pred]), 32 | split_ea(List, Pred, [], [], false). 33 | 34 | %% find old/outdated items, keep rest 35 | -spec split_ea( 36 | In :: list(), Pred :: non_neg_integer(), Keep :: list(), Evict :: list(), Done :: true|false) 37 | -> {Keep :: list(), Evict :: list()}. 38 | 39 | split_ea([], _, Keep, Evict, _) -> 40 | {Keep, Evict}; 41 | split_ea([Ts|T] = List, Pred, Keep, Evict, false) -> 42 | {K, Ev, Done} = 43 | case Ts > Pred of 44 | true -> %lager:info("done evicting when ~p > ~p: diff: ~p",[Ts, Pred, Pred-Ts]), 45 | {List, Evict, true}; 46 | false -> %lager:debug("~p evict ~p",[?MODULE, Ts]), 47 | {Keep, [Ts|Evict], false} 48 | 49 | end, 50 | split_ea(T, Pred, K, Ev, Done); 51 | split_ea(_, _, Keep, Evict, true) -> 52 | {Keep, Evict}. 
53 | 54 | 55 | -------------------------------------------------------------------------------- /apps/faxe/src/faxe.app.src: -------------------------------------------------------------------------------- 1 | {application, faxe, 2 | [{description, "DataFlow based data collector / data processor"}, 3 | {vsn, "1.5.5"}, 4 | {registered, []}, 5 | {mod, { faxe_app, []}}, 6 | {applications, 7 | [kernel, 8 | stdlib, 9 | inets, 10 | xmerl, 11 | mnesia, 12 | plists, 13 | erlang_term, 14 | datestring, 15 | faxe_common, 16 | dynamic_compile, 17 | lager, 18 | lager_logstash_backend, 19 | esq, 20 | emqtt, 21 | jiffy, 22 | jsn, 23 | jwt, 24 | modbus, 25 | msgpack, 26 | cowboy, 27 | gun, 28 | pythra, 29 | snap7erl, 30 | dfs, 31 | qdate, 32 | epgsql, 33 | jamdb_oracle, 34 | carrot, 35 | uuid, 36 | mapz, 37 | gen_smtp, 38 | vmstats, 39 | ecount 40 | ]}, 41 | {env,[]}, 42 | {modules, [esp_stats]}, 43 | 44 | {maintainers, ["Alex Minichmair"]}, 45 | {licenses, ["Apache 2.0"]}, 46 | {links, []} 47 | ]}. 48 | -------------------------------------------------------------------------------- /apps/faxe/src/faxe_event_guard.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2022, 4 | %%% @doc 5 | %%% @end 6 | %%%------------------------------------------------------------------- 7 | -module(faxe_event_guard). 8 | 9 | -behaviour(gen_server). 10 | 11 | -export([start_link/3]). 12 | -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, 13 | code_change/3]). 14 | 15 | -define(SERVER, ?MODULE). 16 | 17 | -record(state, { 18 | event, 19 | module, 20 | config 21 | }). 
22 | 23 | %%%=================================================================== 24 | %%% Spawning and gen_server implementation 25 | %%%=================================================================== 26 | 27 | start_link(Event, Module, Config) -> 28 | gen_server:start_link(?MODULE, [Event, Module, Config], []). 29 | 30 | init([Event, Module, Config]) -> 31 | install_handler(Event, Module, Config), 32 | {ok, #state{event=Event, module=Module, config=Config}}. 33 | 34 | handle_call(_Request, _From, State = #state{}) -> 35 | {reply, ok, State}. 36 | 37 | handle_cast(_Request, State = #state{}) -> 38 | {noreply, State}. 39 | 40 | handle_info({gen_event_EXIT, Module, normal}, #state{module=Module} = State) -> 41 | {stop, normal, State}; 42 | handle_info({gen_event_EXIT, Module, shutdown}, #state{module=Module} = State) -> 43 | {stop, normal, State}; 44 | handle_info({gen_event_EXIT, Module, Reason}, 45 | #state{event=Event, module=Module, config=Config} = State) -> 46 | lager:notice("event-handler ~p exit with reason: ~p",[Module, Reason]), 47 | install_handler(Event, Module, Config), 48 | {noreply, State}; 49 | handle_info(_Info, State = #state{}) -> 50 | {noreply, State}. 51 | 52 | terminate(_Reason, _State = #state{}) -> 53 | ok. 54 | 55 | code_change(_OldVsn, State = #state{}, _Extra) -> 56 | {ok, State}. 57 | 58 | %%%=================================================================== 59 | %%% Internal functions 60 | %%%=================================================================== 61 | install_handler(Event, Module, Config) -> 62 | ok = gen_event:add_sup_handler(Event, Module, Config). 
-------------------------------------------------------------------------------- /apps/faxe/src/faxe_event_guard_sup.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2020, 4 | %%% @doc 5 | %%% @end 6 | %%%------------------------------------------------------------------- 7 | -module(faxe_event_guard_sup). 8 | 9 | -behaviour(supervisor). 10 | 11 | -export([start_link/0, init/1]). 12 | 13 | start_link() -> 14 | supervisor:start_link({local, ?MODULE}, ?MODULE, []). 15 | 16 | init([]) -> 17 | P = [ 18 | % event handler guard process 19 | {faxe_event_guard , {faxe_event_guard, start_link, []}, 20 | permanent, 5000, worker, [faxe_event_guard]} 21 | ], 22 | 23 | {ok, {#{strategy => simple_one_for_one, 24 | intensity => 10, 25 | period => 60}, 26 | P} 27 | }. 28 | -------------------------------------------------------------------------------- /apps/faxe/src/faxe_event_sup.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2020, 4 | %%% @doc 5 | %%% @end 6 | %%%------------------------------------------------------------------- 7 | -module(faxe_event_sup). 8 | 9 | -behaviour(supervisor). 10 | 11 | -export([start_link/0, init/1]). 12 | 13 | start_link() -> 14 | supervisor:start_link({local, ?MODULE}, ?MODULE, []). 
15 | 16 | init([]) -> 17 | P = [ 18 | % event handler guard supervisor 19 | {faxe_event_guard_sup, {faxe_event_guard_sup, start_link, []}, 20 | permanent, 5000, supervisor, [faxe_event_guard_sup]} 21 | , 22 | {faxe_metrics, 23 | {gen_event, start_link, [{local, faxe_metrics}]}, 24 | permanent, 5000, worker, []} 25 | , 26 | {conn_status, 27 | {gen_event, start_link, [{local, conn_status}]}, 28 | permanent, 5000, worker, []} 29 | , 30 | {faxe_debug, 31 | {gen_event, start_link, [{local, faxe_debug}]}, 32 | permanent, 5000, worker, []} 33 | , 34 | {flow_changed, 35 | {gen_event, start_link, [{local, flow_changed}]}, 36 | permanent, 5000, worker, []} 37 | 38 | ], 39 | 40 | {ok, {#{strategy => one_for_one, 41 | intensity => 10, 42 | period => 60}, 43 | P} 44 | }. 45 | -------------------------------------------------------------------------------- /apps/faxe/src/faxe_metrics_sup.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2020, 4 | %%% @doc 5 | %%% @end 6 | %%%------------------------------------------------------------------- 7 | -module(faxe_metrics_sup). 8 | 9 | -behaviour(supervisor). 10 | 11 | -export([start_link/0, init/1]). 12 | 13 | start_link() -> 14 | supervisor:start_link({local, ?MODULE}, ?MODULE, []). 15 | 16 | init([]) -> 17 | P = [ 18 | 19 | {event_handler_starter, 20 | {event_handler_starter, start_link, []}, 21 | transient, 2000, worker, []}, 22 | 23 | {rest_audit_server, 24 | {rest_audit_server, start_link, []}, 25 | permanent, 5000, worker, []} 26 | 27 | %% , 28 | %% {metrics_collector, 29 | %% {metrics_collector, start_link, []}, 30 | %% permanent, 5000, worker, []} 31 | ], 32 | 33 | {ok, {#{strategy => one_for_one, 34 | intensity => 5, 35 | period => 10}, 36 | P} 37 | }. 
38 | -------------------------------------------------------------------------------- /apps/faxe/src/faxe_migration.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2021, 4 | %%% @doc 5 | %%% 6 | %%% @end 7 | %%% Created : 29. Jan 2021 11:50 8 | %%%------------------------------------------------------------------- 9 | -module(faxe_migration). 10 | -author("heyoka"). 11 | 12 | -include("faxe.hrl"). 13 | 14 | %% API 15 | -export([migrate/0]). 16 | 17 | migrate() -> 18 | %% mnesia:create_table(flow_amqp_queues, [ 19 | %% {attributes, record_info(fields, flow_amqp_queues)}, 20 | %% {type, set}, 21 | %% {disc_copies, [node()]} 22 | %% ]), 23 | %faxe_db:add_extra_nodes(nodes()), 24 | ok 25 | %% maybe_migrate_task_table() 26 | . 27 | 28 | 29 | maybe_migrate_task_table() -> 30 | NewFields = record_info(fields, task), 31 | case mnesia:table_info(task, attributes) of 32 | NewFields -> lager:info("no table transform needed !"), ok; 33 | _ -> do_migrate_task_table() 34 | end. 35 | 36 | 37 | do_migrate_task_table() -> 38 | lager:notice("transform task table"), 39 | mnesia:wait_for_tables(task, 5000), 40 | NewFields = record_info(fields, task), 41 | Fun = fun( {task, 42 | Id, 43 | Name, 44 | Dfs, 45 | Definition, 46 | Date, 47 | Pid, 48 | Last_start, 49 | Last_stop, 50 | Permanent, 51 | Is_running, 52 | Template_vars, 53 | Template, 54 | Tags} ) -> 55 | {task, 56 | Id, 57 | Name, 58 | Dfs, 59 | Definition, 60 | Date, 61 | Pid, 62 | Last_start, 63 | Last_stop, 64 | Permanent, 65 | Is_running, 66 | Template_vars, 67 | Template, 68 | Tags, 69 | Name, %% group 70 | true %% group_leader 71 | } 72 | end, 73 | {atomic, ok} = mnesia:transform_table(task, Fun, NewFields), 74 | %% add index to group-fieldll 75 | 76 | mnesia:add_table_index(task, group), 77 | ok 78 | . 
79 | 80 | -------------------------------------------------------------------------------- /apps/faxe/src/flow/flow_deleter.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2025, 4 | %%% @doc serialize force delete action 5 | %%% @end 6 | %%%------------------------------------------------------------------- 7 | -module(flow_deleter). 8 | 9 | -behaviour(gen_server). 10 | 11 | -export([start_link/0, do/1]). 12 | -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, 13 | code_change/3]). 14 | 15 | -include("faxe.hrl"). 16 | 17 | -define(SERVER, ?MODULE). 18 | -define(MAX_RETRIES, 3). 19 | -define(RETRY_INTERVAL, 2000). 20 | 21 | -record(state, {}). 22 | 23 | do(Flow = #task{}) -> 24 | ?SERVER ! {{delete_flow, Flow}, 1}. 25 | %%%=================================================================== 26 | %%% Spawning and gen_server implementation 27 | %%%=================================================================== 28 | 29 | start_link() -> 30 | gen_server:start_link({local, ?SERVER}, ?MODULE, [], []). 31 | 32 | init([]) -> 33 | {ok, #state{}}. 34 | 35 | handle_call(_Request, _From, State = #state{}) -> 36 | {reply, ok, State}. 37 | 38 | handle_cast(_Request, State = #state{}) -> 39 | {noreply, State}. 40 | 41 | handle_info({{delete_flow, _Flow=#task{name = Name}}, NumTried}, State = #state{}) when NumTried > ?MAX_RETRIES -> 42 | lager:notice("cannot force delete flow: ~p with ~p retries",[Name, ?MAX_RETRIES]), 43 | {noreply, State}; 44 | handle_info({{delete_flow, Flow=#task{name = Name}} = Req, NumTried}, State = #state{}) -> 45 | case catch faxe:force_delete_task(Flow) of 46 | ok -> ok; 47 | Other -> 48 | lager:notice("force delete flow '~p' failed with ~p, retry",[Name, Other]), 49 | erlang:send_after(?RETRY_INTERVAL, self(), {Req, NumTried+1}) 50 | end, 51 | {noreply, State}. 
52 | 53 | terminate(_Reason, _State = #state{}) -> 54 | ok. 55 | 56 | code_change(_OldVsn, State = #state{}, _Extra) -> 57 | {ok, State}. 58 | 59 | %%%=================================================================== 60 | %%% Internal functions 61 | %%%=================================================================== 62 | -------------------------------------------------------------------------------- /apps/faxe/src/flow/graph_sup.erl: -------------------------------------------------------------------------------- 1 | %% Date: 16.04.17 - 22:14 2 | %% Ⓒ 2017 Alexander Minichmair 3 | -module(graph_sup). 4 | -author("Alexander Minichmair"). 5 | 6 | -behaviour(supervisor). 7 | 8 | %% API 9 | -export([start_link/0, new/2]). 10 | 11 | %% Supervisor callbacks 12 | -export([init/1]). 13 | 14 | %%%=================================================================== 15 | %%% API functions 16 | %%%=================================================================== 17 | 18 | %%-------------------------------------------------------------------- 19 | %% @doc 20 | %% Starts the supervisor 21 | %% 22 | %% @end 23 | %%-------------------------------------------------------------------- 24 | -spec(start_link() -> 25 | {ok, Pid :: pid()} | ignore | {error, Reason :: term()}). 26 | start_link() -> 27 | supervisor:start_link({local, ?MODULE}, ?MODULE, []). 28 | 29 | %%%=================================================================== 30 | %%% Supervisor callbacks 31 | %%%=================================================================== 32 | 33 | %%-------------------------------------------------------------------- 34 | %% @private 35 | %% @doc 36 | %% Whenever a supervisor is started using supervisor:start_link/[2,3], 37 | %% this function is called by the new process to find out about 38 | %% restart strategy, maximum restart frequency and child 39 | %% specifications. 
40 | %% 41 | %% @end 42 | %%-------------------------------------------------------------------- 43 | -spec(init([]) -> 44 | {ok, {SupFlags :: {RestartStrategy :: supervisor:strategy(), 45 | MaxR :: non_neg_integer(), MaxT :: non_neg_integer()}, 46 | [ChildSpec :: supervisor:child_spec()] 47 | }} | 48 | ignore | 49 | {error, Reason :: term()}). 50 | init([]) -> 51 | RestartStrategy = one_for_one, 52 | MaxRestarts = 15, 53 | MaxSecondsBetweenRestarts = 25, 54 | 55 | SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts}, 56 | 57 | {ok, {SupFlags, []}}. 58 | 59 | 60 | %% 61 | %% @doc start a new graph under this supervisor 62 | new(Id, Defs) -> 63 | supervisor:start_child(?MODULE, child(Id, Defs)). 64 | 65 | %%%=================================================================== 66 | %%% Internal functions 67 | %%%=================================================================== 68 | child(Id, Params) -> 69 | {Id, { df_graph, start_link, [Id, Params]}, 70 | temporary, 3000, worker, dynamic}. 71 | -------------------------------------------------------------------------------- /apps/faxe/src/lib/amqp_options.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2020, 4 | %%% @doc 5 | %%% 6 | %%% @end 7 | %%% Created : 31. Aug 2020 18:42 8 | %%%------------------------------------------------------------------- 9 | -module(amqp_options). 10 | -author("heyoka"). 11 | 12 | -include_lib("amqp_client/include/amqp_client.hrl"). 13 | %% API 14 | -export([parse/1, parse/2]). 15 | 16 | -spec parse(list()|map()) -> #amqp_params_network{}. 17 | parse(Opts) -> 18 | parse(Opts, #{}). 19 | -spec parse(list()|map(), map()) -> #amqp_params_network{}. 
20 | parse([], Acc) -> amqp_params(maps:to_list(Acc)); 21 | 22 | parse(Opts, Acc) when is_map(Opts) -> 23 | parse(maps:to_list(Opts), Acc); 24 | parse([{host, Host} | R], Acc) when is_binary(Host) -> 25 | parse(R, Acc#{host => binary_to_list(Host)}); 26 | parse([{host, Host} | R], Acc) when is_list(Host) -> 27 | parse(R, Acc#{host => Host}); 28 | parse([{port, Port} | R], Acc) when is_integer(Port) -> 29 | parse(R, Acc#{port => Port}); 30 | parse([{heartbeat, KeepAlive} | R], Acc) when is_integer(KeepAlive) -> 31 | parse(R, Acc#{heartbeat => KeepAlive}); 32 | parse([{user, User} | R], Acc) when is_binary(User) -> 33 | parse(R, Acc#{user => User}); 34 | parse([{pass, Pass} | R], Acc) when is_binary(Pass) -> 35 | parse(R, Acc#{pass => Pass}); 36 | parse([{vhost, VHost} | R], Acc) when is_binary(VHost) -> 37 | parse(R, Acc#{vhost => VHost}); 38 | parse([{connection_timeout, ConnTimeout} | R], Acc) when is_integer(ConnTimeout) -> 39 | parse(R, Acc#{connection_timeout => ConnTimeout}); 40 | parse([{ssl, false} | R], Acc) -> 41 | parse(R, Acc#{ssl => false}); 42 | parse([{ssl, true} | R], Acc) -> 43 | Opts = faxe_config:get_ssl_opts(amqp), 44 | parse(R, Acc#{ssl => true, ssl_opts => Opts}); 45 | parse([_ | R], Acc) -> 46 | parse(R, Acc). 47 | 48 | 49 | amqp_params(Config) -> 50 | #amqp_params_network{ 51 | username = proplists:get_value(user, Config, <<"guest">>), 52 | password = proplists:get_value(pass, Config, <<"guest">>), 53 | virtual_host = proplists:get_value(vhost, Config, <<"/">>), 54 | port = proplists:get_value(port, Config), 55 | host = proplists:get_value(host, Config), 56 | heartbeat = proplists:get_value(heartbeat, Config, 80), 57 | connection_timeout = proplists:get_value(connection_timeout, Config, 15000), 58 | ssl_options = proplists:get_value(ssl_opts, Config, none) 59 | }. 

--------------------------------------------------------------------------------
/apps/faxe/src/lib/bcd.erl:
--------------------------------------------------------------------------------
-module(bcd).
-compile(export_all).
-compile(nowarn_export_all).

%% Encode non-negative integer N as packed BCD into Size bytes (2 digits/byte).
encode(N, Size) ->
  encode0(N, Size * 2, <<>>).

%% prepend one BCD nibble per decimal digit, least significant digit first
encode0(N, Size, Acc) when Size > 0 ->
  encode0(N div 10, Size - 1, <<(N rem 10):4, Acc/bits>>);
encode0(_, _, Acc) ->
  Acc.

%% Decode a packed BCD binary of exactly Size bytes; returns the integer
%% or the atom 'error' on a size mismatch.
decode(N, Size) ->
  case byte_size(N) of
    Size ->
      decode0(N, 0);
    _ ->
      error
  end.

%% NOTE(review): the bit-syntax pattern was lost in extraction;
%% reconstructed from the `Acc * 10 + X` accumulator -- one 4-bit nibble at a time.
decode0(<<X:4, Bin/bits>>, Acc) ->
  decode0(Bin, Acc * 10 + X);
decode0(<<>>, Acc) ->
  Acc.

%% other version of decode: turn each nibble into an ASCII digit
%% (generator pattern reconstructed, see NOTE above)
decode2(Bits) ->
  list_to_integer([X + $0 || <<X:4>> <= Bits]).

--------------------------------------------------------------------------------
/apps/faxe/src/lib/conn_status_handler_mqtt.erl:
--------------------------------------------------------------------------------
%% Date: 06.04.20 - 08:27
%% Ⓒ 2020 heyoka
-module(conn_status_handler_mqtt).
-author("Alexander Minichmair").

-behaviour(event_handler_mqtt).

%% event_handler_mqtt callbacks
-export([
  init/1,
  handle_event/2]).

-record(state, {
  topic
}).

%%%===================================================================
%%% event_handler_mqtt callbacks
%%%===================================================================

-spec(init(InitArgs :: term()) ->
  {ok, State :: #state{}} |
  {ok, State :: #state{}, hibernate} |
  {error, Reason :: term()}).
init(Topic0) ->
  Topic = faxe_util:build_topic([Topic0, <<"conn_status">>]),
  %% conn_status messages are published retained with qos 1
  {ok, #{retained => true, qos => 1}, #state{topic = Topic}}.

handle_event({{FlowId, NodeId}, Item}, State = #state{topic = Topic}) ->
  %% TODO(review): the topic binary construction was lost in extraction;
  %% reconstructed as base-topic/flow-id/node-id -- confirm against original repo
  T = <<Topic/binary, "/", FlowId/binary, "/", NodeId/binary>>,
  {publish, T, Item, State}.

--------------------------------------------------------------------------------
/apps/faxe/src/lib/crate_ignore_rules.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2023,
%%% @doc
%%% Rules for ignoring certain CrateDB server errors, kept in the
%%% 'crate_ignore_rules' ets table. A rule is {<<"code">>, Int} or
%%% {<<"message">>, BinaryFragment}.
%%% @end
%%% Created : 25. Sep 2023 7:49 AM
%%%-------------------------------------------------------------------
-module(crate_ignore_rules).
-author("heyoka").

%% API
-export([check_ignore_error/2, init_rules/0, get_rules/0, add_rule/2]).


%% read comma-separated "type=value" rules from config and store them in ets
init_rules() ->
  Rules0 = faxe_util:to_bin(faxe_config:get_sub(crate, ignore_rules, <<>>)),
  Rules1 = binary:split(Rules0, <<",">>, [global, trim_all]),
  Rules = lists:map(fun(R) ->
    %% NOTE(review): [global] means a value containing '=' would crash this match
    [RType, RValue] = binary:split(R, <<"=">>, [global, trim_all]),
    Value = case RType of <<"code">> -> binary_to_integer(RValue); _ -> RValue end,
    {RType, Value}
            end, Rules1),
  ets:insert(crate_ignore_rules, {rules, Rules}).

get_rules() ->
  case ets:lookup(crate_ignore_rules, rules) of
    [{rules, Rules}] -> Rules;
    _ -> []
  end.

%% add a single rule at runtime; "code" values are normalized to integer
add_rule(<<"code">> = T, Value) when is_binary(Value) ->
  add_rule(T, binary_to_integer(Value));
add_rule(Type, Value) ->
  Rules = get_rules(),
  NewRules = [{Type, Value} | Rules],
  ets:insert(crate_ignore_rules, {rules, NewRules}).

%% returns {error, invalid} when some rule matches (-> error is ignored),
%% otherwise {failed, server_error}
check_ignore_error(Code, Message) ->
  Rules = get_rules(),
  Check =
    fun
      ({<<"code">>, RuleCode}) -> RuleCode == Code;
      ({<<"message">>, MsgPart}) -> estr:str_contains(Message, MsgPart)
    end,
  case lists:any(Check, Rules) of
    true ->
      % ignore
      lager:notice("IGNORE server error because of rule ~p", [Rules]),
      {error, invalid};
    false ->
      {failed, server_error}
  %% NOTE(review): closing keyword was garbled to "}." in extraction; restored
  end.
--------------------------------------------------------------------------------
/apps/faxe/src/lib/debug_handler_mqtt.erl:
--------------------------------------------------------------------------------
%% Date: 06.04.20 - 08:27
%% Ⓒ 2020 heyoka
-module(debug_handler_mqtt).
-author("Alexander Minichmair").

-behaviour(event_handler_mqtt).

-include("faxe.hrl").
%% event_handler_mqtt callbacks
-export([
  init/1,
  handle_event/2]).

-record(state, {
  topic
}).

%%%===================================================================
%%% event_handler_mqtt callbacks
%%%===================================================================

-spec(init(InitArgs :: term()) ->
  {ok, State :: #state{}} |
  {ok, State :: #state{}, hibernate} |
  {error, Reason :: term()}).
init(Topic0) ->
  Topic = faxe_util:build_topic([Topic0, <<"debug">>]),
  %% debug messages are fire-and-forget: qos 0, not retained
  {ok, #{retained => false, qos => 0}, #state{topic = Topic}}.

handle_event({Key, {FlowId, NodeId} = _FNId, Port, Item}, State = #state{topic = Topic}) ->
  K = atom_to_binary(Key, utf8),
  %% TODO(review): the topic binary construction was lost in extraction;
  %% reconstructed as base-topic/flow-id/node-id -- confirm against original repo
  T = <<Topic/binary, "/", FlowId/binary, "/", NodeId/binary>>,
  %% wrap the item as json inside a fresh data_point, with meta fields attached
  Out0 = #data_point{ts = faxe_time:now(), fields = #{<<"data_item">> => flowdata:to_json(Item)}},
  Out = flowdata:set_fields(Out0,
    [<<"meta.type">>, <<"meta.flow_id">>, <<"meta.node_id">>, <<"meta.port">>],
    [K, FlowId, NodeId, Port]),
%%  lager:info("DEBUG [~p] :: ~p on Port ~p~n~s~n~s",[_FNId, Key, Port, flowdata:to_json(Out), T]),
  {publish, T, Out, State}.

--------------------------------------------------------------------------------
/apps/faxe/src/lib/dfs_debug_handler.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2024,
%%% @doc
%%% gen_event handler that logs dfs lambda-expression evaluations.
%%% @end
%%% Created : 18. Mar 2024 11:00 AM
%%%-------------------------------------------------------------------
-module(dfs_debug_handler).
-author("heyoka").

-behaviour(gen_event).

-export([code_change/3, handle_call/2, handle_event/2,
  handle_info/2, init/1, terminate/2]).

-record(state, {flow_name}).

init([FlowName]) ->
  %% attach the flow name to lager metadata for all subsequent log lines
  lager:md([{flow, FlowName}]),
  {ok, #state{flow_name = FlowName}}.

handle_event(#{expression := Expr0, result := Result0}, State) ->
  Expr = clean(Expr0),
  lager:notice("EXPRESSION: ~s || RESULT: ~p~n", [Expr, Result0]),
  {ok, State};
handle_event(Event, State) ->
  lager:notice("unexpected event ~p: ~p~n", [?MODULE, Event]),
  {ok, State}.

%% NOTE(review): replies with the full State term (gen_event: {ok, Reply, State})
handle_call(Request, State) ->
  lager:notice("unexpected call to ~p: ~p~n", [?MODULE, Request]),
  {ok, State, State}.

code_change(_OldVsn, State, _Extra) -> {ok, State}.

handle_info(_Info, State) ->
  {ok, State}.

terminate(_Args, _State) -> ok.

%% strip internal module prefixes so the logged expression reads like user dfs
clean(String) ->
  %% remove the module names
  string:replace(
    string:replace(
      string:replace(
        string:replace(String, "math:", "", all),
        "estr:", "", all),
      "faxe_lambda_lib:", "", all),
    "dfs_std_lib:", "", all).
--------------------------------------------------------------------------------
/apps/faxe/src/lib/faxe_epgsql_codec.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2019,
%%% @doc
%%% epgsql codec that decodes postgres timestamps to unix millis.
%%% @end
%%% Created : 10. Oct 2019 08:36
%%%-------------------------------------------------------------------
-module(faxe_epgsql_codec).
-author("heyoka").

-behavior(epgsql_codec).

-define(POSTGRESQL_GS_EPOCH, 63113904000). % calendar:datetime_to_gregorian_seconds({{2000,1,1}, {0,0,0}}).
-define(int64, 1/big-signed-unit:64).

%% API
-export([init/2, names/0, encode/3, decode/3]).


init(_, _Sock) ->
  %% this codec keeps no state
  State = undefined,
  State.

names() ->
  [timestamp].

encode(Data, _, _State) ->
  %% pass-through: encoding is left to the default codec behavior
  Data.

%% NOTE(review): bit pattern was garbled in extraction; reconstructed as
%% <<Ts:?int64>> -- the only use consistent with the ?int64 define above.
%% Ts is microseconds since 2000-01-01; convert to unix epoch milliseconds.
decode(<<Ts:?int64>> = _T, timestamp, _State) ->
  TsOutMicro = Ts + (?POSTGRESQL_GS_EPOCH * 1000000) - (62167219200 * 1000000),
  round(TsOutMicro / 1000);
decode({Date, {Hour, Minute, SecondFrac}} = _T, timestamp, _State) ->
  %% calendar-tuple form: split fractional seconds into whole second + millis
  Second = erlang:trunc(SecondFrac),
  M0 = SecondFrac - Second,
  Milli = erlang:round(M0 * 1000),
  faxe_time:to_ms({Date, {Hour, Minute, Second, Milli}}).

--------------------------------------------------------------------------------
/apps/faxe/src/lib/faxe_seq_check.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2025,
%%% @doc
%%% @end
%%%-------------------------------------------------------------------
-module(faxe_seq_check).

-behaviour(gen_server).

-include("faxe.hrl").

-export([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
  code_change/3]).

-define(TABLE, faxe_seq_checks).

-define(META_FIELD, <<"_meta">>).

%%%==================================================================
%%% public api
%%%==================================================================

%%%===================================================================
%%% Spawning and gen_server implementation
%%%===================================================================

start_link(CheckState) ->
  gen_server:start_link(?MODULE, [CheckState], []).

%% the caller-provided #seq_check{} record is the whole server state
init([CheckState]) ->
  {ok, CheckState}.

handle_call(_Request, _From, State = #seq_check{}) ->
  {reply, ok, State}.

handle_cast(_Request, State = #seq_check{}) ->
  {noreply, State}.

%% currently only logs incoming items; sequence checking not implemented here
handle_info({handle, Topic, Item}, State = #seq_check{}) ->
  lager:notice("handle ~p, ~p", [Topic, Item]),
  {noreply, State};
handle_info(_Info, State = #seq_check{}) ->
  {noreply, State}.

terminate(_Reason, _State = #seq_check{}) ->
  ok.

code_change(_OldVsn, State = #seq_check{}, _Extra) ->
  {ok, State}.

%%%===================================================================
%%% Internal functions
%%%===================================================================
--------------------------------------------------------------------------------
/apps/faxe/src/lib/faxe_tcp_server.erl:
--------------------------------------------------------------------------------
%% ranch protocol handler implemented as a gen_statem; echoes data it is
%% told to send and notifies its parent process when the socket is up.
-module(faxe_tcp_server).

-behaviour(gen_statem).
-behaviour(ranch_protocol).

%% API.
-export([start_link/4]).

%% gen_statem.
-export([callback_mode/0]).
-export([init/1]).
-export([connected/3]).
-export([terminate/3]).
-export([code_change/4]).

-define(TIMEOUT, 5000).

-record(state, {socket, transport, parent}).

%% API.

start_link(Ref, _Socket, Transport, Opts) ->
  {ok, proc_lib:spawn_link(?MODULE, init, [{Ref, Transport, Opts}])}.

%% gen_statem.

callback_mode() ->
  state_functions.

init({Ref, Transport, #{parent := Parent, tcp_opts := Opts}}) ->
  %% complete the ranch handshake, then enter the statem loop directly
  {ok, Socket} = ranch:handshake(Ref),
  ok = Transport:setopts(Socket, Opts),
  Parent ! {tcp_server_up, self()},
  gen_statem:enter_loop(?MODULE, [], connected,
    #state{socket = Socket, transport = Transport, parent = Parent}).

%% 'connected' state: forward {data, _} messages out on the socket,
%% stop on socket close/error or timeout.
connected(info, {data, Data}, #state{socket = Socket, transport = Transport}) ->
  Transport:send(Socket, Data),
  keep_state_and_data;
connected(info, {tcp_closed, _Socket}, _StateData) ->
  {stop, normal};
connected(info, {tcp_error, _, Reason}, _StateData) ->
  {stop, Reason};
connected({call, From}, _Request, _StateData) ->
  gen_statem:reply(From, ok),
  keep_state_and_data;
connected(cast, _Msg, _StateData) ->
  keep_state_and_data;
connected(timeout, _Msg, _StateData) ->
  {stop, normal};
connected(_EventType, _Msg, _StateData) ->
  io:format("~ngot: ~p~n", [_Msg]),
  {stop, normal}.

%% close the socket once, then recurse with both fields cleared
terminate(Reason, StateName, StateData = #state{socket = Socket, transport = Transport})
  when Socket =/= undefined andalso Transport =/= undefined ->

  catch Transport:close(Socket),
  terminate(Reason, StateName,
    StateData#state{socket = undefined, transport = undefined});
terminate(_Reason, _StateName, _StateData) ->
  ok.

code_change(_OldVsn, StateName, StateData, _Extra) ->
  {ok, StateName, StateData}.

--------------------------------------------------------------------------------
/apps/faxe/src/lib/faxe_time_offset_monitor.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2021,
%%% @doc
%%% @end
%%%-------------------------------------------------------------------
-module(faxe_time_offset_monitor).

-behaviour(gen_server).

-export([start_link/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
  code_change/3]).

-define(SERVER, ?MODULE).

-record(state, {current_offset = 0}).

%%%===================================================================
%%% Spawning and gen_server implementation
%%%===================================================================

start_link() ->
  gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).

init([]) ->
  %% get notified whenever the VM's time offset changes
  erlang:monitor(time_offset, clock_service),
  {ok, #state{current_offset = erlang:time_offset(milli_seconds)}}.

handle_call(_Request, _From, State = #state{}) ->
  {reply, ok, State}.

handle_cast(_Request, State = #state{}) ->
  {noreply, State}.

handle_info({'CHANGE', _MonitorReference, time_offset, clock_service, NewTimeOffset},
    State = #state{current_offset = Off}) ->
  OffsetMs = erlang:convert_time_unit(NewTimeOffset, native, milli_seconds),
  %% warn only on an actual change in milliseconds
  case Off - OffsetMs /= 0 of
    true ->
      lager:warning("TIME_OFFSET changed by ~pms to: ~p", [Off - OffsetMs, OffsetMs]);
    false ->
      ok
  end,
  {noreply, State#state{current_offset = OffsetMs}};
handle_info(_Info, State = #state{}) ->
  lager:info("[~p]got info: ~p", [?MODULE, _Info]),
  {noreply, State}.

terminate(_Reason, _State = #state{}) ->
  ok.

code_change(_OldVsn, State = #state{}, _Extra) ->
  {ok, State}.

%%%===================================================================
%%% Internal functions
%%%===================================================================

--------------------------------------------------------------------------------
/apps/faxe/src/lib/flow_changed_handler_mqtt.erl:
--------------------------------------------------------------------------------
%% Date: 27.08.21 - 08:27
%% Ⓒ 2021 heyoka
-module(flow_changed_handler_mqtt).
-author("Alexander Minichmair").

-behaviour(event_handler_mqtt).

-include("faxe.hrl").

%% event_handler_mqtt callbacks
-export([
  init/1,
  handle_event/2]).

-record(state, {
  topic,
  topic_flow_export,
  topic_template_export
}).

%%%===================================================================
%%% event_handler_mqtt callbacks
%%%===================================================================

-spec(init(InitArgs :: term()) ->
  {ok, Opts :: map(), State :: #state{}} |
  {ok, Opts :: map(), State :: #state{}, hibernate} |
  {error, Reason :: term()}).
init(Topic0) ->
  %% topic not used for now, as we only do flow/template export at the moment
  Topic = faxe_util:build_topic([Topic0, <<"flow_changed">>]),
  TopicExport = faxe_util:build_topic([Topic0, <<"flows_export">>]),
  TopicTemplateExport = faxe_util:build_topic([Topic0, <<"templates_export">>]),
  {ok, #{qos => 0, retained => true},
    #state{topic = Topic, topic_flow_export = TopicExport, topic_template_export = TopicTemplateExport}}.

%% publish a full export of either all templates or all flows,
%% depending on the "type" field of the incoming event
handle_event(Event = #data_point{},
    State = #state{topic_template_export = TTempExport, topic_flow_export = TopicExport}) ->
  {Topic, Maps} =
    %% NOTE(review): compares the "type" field against the atom 'template' --
    %% presumably the field is set internally as an atom; verify against emitter
    case flowdata:field(Event, <<"type">>) of
      template -> {TTempExport, [rest_helper:template_to_map(T) || T <- faxe:list_templates()]};
      _ -> {TopicExport, [rest_helper:task_to_map(T) || T <- faxe:list_tasks()]}
    end,
  {publish, Topic, #data_point{ts = faxe_time:now(), fields = #{export => Maps}}, State}.
--------------------------------------------------------------------------------
/apps/faxe/src/lib/http_lib.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2021,
%%% @doc
%%% Small helpers for building http request headers.
%%% @end
%%% Created : 13. Sep 2021 19:58
%%%-------------------------------------------------------------------
-module(http_lib).
-author("heyoka").

%% API
-export([user_agent/0, basic_auth_header/2, user_agent_header/0]).

%% "faxe/<app-version>"
-spec user_agent() -> iodata().
user_agent() ->
  {ok, Vsn} = application:get_key(vsn),
  iolist_to_binary([<<"faxe/">>, Vsn]).

user_agent_header() ->
  [{<<"user-agent">>, user_agent()}].

%% RFC 7617 basic auth header; empty list when either credential is missing.
basic_auth_header(undefined, _) -> [];
basic_auth_header(_, undefined) -> [];
basic_auth_header(User, Pass) when is_binary(User), is_binary(Pass) ->
  %% NOTE(review): binary construction lost in extraction; reconstructed as the
  %% standard base64("user:pass") form per RFC 7617
  Basic = base64:encode(<<User/binary, ":", Pass/binary>>),
  [{"authorization", <<"Basic ", Basic/binary>>}];
basic_auth_header(_, _) -> [].

--------------------------------------------------------------------------------
/apps/faxe/src/lib/metrics_handler_mqtt.erl:
--------------------------------------------------------------------------------
%% Date: 06.04.20 - 08:27
%% Ⓒ 2020 heyoka
-module(metrics_handler_mqtt).
-author("Alexander Minichmair").

-behaviour(event_handler_mqtt).

%% event_handler_mqtt callbacks
-export([
  init/1,
  handle_event/2]).

-record(state, {
  topic
}).

%%%===================================================================
%%% event_handler_mqtt callbacks
%%%===================================================================

-spec(init(InitArgs :: term()) ->
  {ok, State :: #state{}} |
  {ok, State :: #state{}, hibernate} |
  {error, Reason :: term()}).
init(Topic0) ->
  Topic = faxe_util:build_topic([Topic0, <<"metrics">>]),
  {ok, #{qos => 0, retained => false}, #state{topic = Topic}}.

%% flow-level metrics are always published; node-level metrics only when
%% tracing is enabled for the flow in the 'metric_trace_flows' ets table
handle_event({{FlowId}, Item}, State = #state{topic = Topic}) ->
  %% TODO(review): topic binary lost in extraction; reconstructed -- confirm
  T = <<Topic/binary, "/", FlowId/binary>>,
  {publish, T, Item, State};
handle_event({{FlowId, NodeId, MetricName}, Item}, State = #state{topic = Topic}) ->
  case ets:lookup(metric_trace_flows, FlowId) of
    [{FlowId, true}] ->
      %% TODO(review): topic binary lost in extraction; reconstructed -- confirm
      T = <<Topic/binary, "/", FlowId/binary, "/", NodeId/binary, "/", MetricName/binary>>,
      {publish, T, Item, State};
    _ ->
      {ok, State}
  end.
--------------------------------------------------------------------------------
/apps/faxe/src/lib/mqtt_options.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2020,
%%% @doc
%%% Normalize faxe mqtt node options into a plain option proplist.
%%% Unknown options are silently skipped.
%%% @end
%%% Created : 31. Aug 2020 18:42
%%%-------------------------------------------------------------------
-module(mqtt_options).
-author("heyoka").

%% API
-export([parse/1, parse/2]).

-spec parse(list()|map()) -> list().
parse(Opts) ->
  parse(Opts, #{}).

-spec parse(list()|map(), map()) -> list().
parse([], Acc) -> maps:to_list(Acc);
parse(Opts, Acc) when is_map(Opts) ->
  parse(maps:to_list(Opts), Acc);
parse([{host, Host} | R], Acc) when is_binary(Host) ->
  %% hosts are normalized to charlists
  parse(R, Acc#{host => binary_to_list(Host)});
parse([{host, Host} | R], Acc) when is_list(Host) ->
  parse(R, Acc#{host => Host});
parse([{port, Port} | R], Acc) when is_integer(Port) ->
  parse(R, Acc#{port => Port});
parse([{keepalive, KeepAlive} | R], Acc) when is_integer(KeepAlive) ->
  parse(R, Acc#{keepalive => KeepAlive});
parse([{user, User} | R], Acc) when is_binary(User) ->
  parse(R, Acc#{user => User});
parse([{pass, Pass} | R], Acc) when is_binary(Pass) ->
  parse(R, Acc#{pass => Pass});
parse([{retained, Ret} | R], Acc) when is_atom(Ret) ->
  parse(R, Acc#{retained => Ret});
parse([{qos, Qos} | R], Acc) when is_integer(Qos) ->
  parse(R, Acc#{qos => Qos});
parse([{client_id, ClientId} | R], Acc) when is_binary(ClientId) ->
  parse(R, Acc#{client_id => ClientId});
parse([{ssl, false} | R], Acc) ->
  parse(R, Acc#{ssl => false});
parse([{ssl, true} | R], Acc) ->
  %% ssl options come from the global faxe config
  Opts = faxe_config:get_ssl_opts(mqtt),
  parse(R, Acc#{ssl => true, ssl_opts => Opts});
parse([_ | R], Acc) ->
  parse(R, Acc).



--------------------------------------------------------------------------------
/apps/faxe/src/lib/mqtt_pub_pool_sup.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2020,
%%% @doc
%%% @end
%%%-------------------------------------------------------------------
-module(mqtt_pub_pool_sup).

-behaviour(supervisor).

-export([start_link/0, init/1]).

start_link() ->
  supervisor:start_link({local, ?MODULE}, ?MODULE, []).

%% single permanent child: the mqtt publisher pool manager
init([]) ->
  P = [
    {mqtt_pub_pool_manager, {mqtt_pub_pool_manager, start_link, []},
      permanent, 5000, worker, []}
  ],

  {ok, {#{strategy => one_for_one,
    intensity => 5,
    period => 10},
    P}
  }.

--------------------------------------------------------------------------------
/apps/faxe/src/lib/s7pool_sup.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2020,
%%% @doc
%%% @end
%%%-------------------------------------------------------------------
-module(s7pool_sup).

-behaviour(supervisor).

-export([start_link/0, init/1]).

start_link() ->
  supervisor:start_link({local, ?MODULE}, ?MODULE, []).

%% single permanent child: the s7 connection pool manager
init([]) ->
  P = [
    {s7pool_manager, {s7pool_manager, start_link, []},
      permanent, 5000, worker, []}
%%    ,
%%    {s7pool_con_handler, {s7pool_con_handler, start_link, []},
%%      permanent, 5000, worker, []}
  ],

  {ok, {#{strategy => one_for_one,
    intensity => 5,
    period => 10},
    P}
  }.

--------------------------------------------------------------------------------
/apps/faxe/src/lib/s7reader_sup.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2022,
%%% @doc
%%% @end
%%%-------------------------------------------------------------------
-module(s7reader_sup).

-behaviour(supervisor).

-export([start_link/0, init/1, start_reader/1]).

%% start one s7reader per plc ip (the ip doubles as the child id)
start_reader(Opts) ->
  supervisor:start_child(?MODULE, child(Opts)).

start_link() ->
  supervisor:start_link({local, ?MODULE}, ?MODULE, []).

%% no static children: readers are added on demand via start_reader/1
init([]) ->
  {ok, {#{strategy => one_for_one,
    intensity => 5,
    period => 30},
    []}
  }.

child(Opts = #{ip := Ip}) ->
  #{id => Ip,
    start => {s7reader, start_link, [Opts]},
    restart => transient,
    type => worker,
    shutdown => 4000}.
--------------------------------------------------------------------------------
/apps/faxe/src/perf.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2021
%%% @doc
%%% Ad-hoc benchmark for parallel lambda execution over a data_batch.
%%% @end
%%% Created : 01. Aug 2021 21:08
%%%-------------------------------------------------------------------
-module(perf).
-author("heyoka").

-include("faxe.hrl").
%% API
-export([lambda_perf/0, lambda_perf/1, lambda_perf/2, execute/3]).

lambda_perf() ->
  lambda_perf(1, 10).
lambda_perf(Num) ->
  lambda_perf(Num, auto).
%% 'auto' concurrency: 8% of the item count
lambda_perf(Num, auto) ->
  Par = round((Num * 8) / 100),
  lambda_perf(Num, Par);
lambda_perf(Num, Par) ->
  Vars = ["dat.val2", "val1"],
%%  Refs = [<<"Dat_val2">>, <<"Val1">>],
  Lambda = " dfs_std_lib:int(Dat_val2 > 3) ",
%%  Lambda = " dfs_std_lib:int(Dat_val2 / 1000) + math:sqrt(Val1) / 4000",
  LambdaCall = lambda_helper(Lambda, Vars),
  Batch = make_batch(Num),
%%  lager:info("LambdaCall: ~p",[LambdaCall]),
  {TMy, _Res} = timer:tc(perf, execute, [Batch, LambdaCall, Par]),
  {time_to_process_lambdas, {items, Num}, {concurrency, Par},
    {time_spent, erlang:round(TMy / 1000), millis}}.

%% run the lambda over every point of the batch, Par-way parallel
execute(#data_batch{points = Points}, LambdaCall, Par) ->
  plists:map(
    fun(Point) ->
      faxe_lambda:execute(Point, LambdaCall, <<"_">>)
    end,
    Points,
    Par
  ).

%% build a batch of Num synthetic points with a flat and a nested field
make_batch(Num) ->
  Points = [
    #data_point{
      ts = faxe_time:now(),
      fields = #{<<"val1">> => Z, <<"dat">> => #{<<"val2">> => Z * 2}}}
    || Z <- lists:seq(1, Num)
  ],
  #data_batch{points = Points}.

%% compile a dfs lambda string; binds "a.b" paths as titlecased, underscored vars
lambda_helper(LambdaString, VarList) ->
  VarBindings = [{replace_dots(string:titlecase(Var)), list_to_binary(string:lowercase(Var))} || Var <- VarList],
%%  lager:notice("VarBindings: ~p",[VarBindings]),
  {Vars, Bindings} = lists:unzip(VarBindings),
  faxe_dfs:make_lambda_fun(LambdaString, Vars, Bindings).

replace_dots(String) ->
  binary_to_list(estr:str_replace(list_to_binary(String), <<".">>, <<"_">>)).
--------------------------------------------------------------------------------
/apps/faxe/src/web/cmw_headers.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2020
%%%
%%% @end
%%% Created : 13. May 2020 14:51
%%%-------------------------------------------------------------------
-module(cmw_headers).
-author("heyoka").

-behavior(cowboy_middleware).

%% API
-export([execute/2]).

%% on the one hand we add an OPTIONS method to all endpoints implicitely,
%% on the other hand we set cors headers when OPTIONS is used
execute(#{method := <<"OPTIONS">>} = Req, Env) ->
  #{handler := Handler, handler_opts := HandlerOpts} = Env,
  %% ask the target handler which methods it supports, to answer the preflight
  {cowboy_rest, _Req, State} = Handler:init(Req, HandlerOpts),
  {AllowedMethods, _, _} = Handler:allowed_methods(Req, State),
  {ok, ReqWithCorsHeaders} = set_cors_headers(Req, AllowedMethods),
  ReqFinal = cowboy_req:reply(200, ReqWithCorsHeaders),
  {stop, ReqFinal};
execute(Req, Env) ->
  Req2 = cowboy_req:set_resp_header(<<"access-control-allow-origin">>, <<"*">>, Req),
  {ok, Req2, Env}.

%% CORS preflight response headers; OPTIONS is always advertised
set_cors_headers(Req, Methods) ->
  Headers = [
    {<<"access-control-allow-origin">>, <<"*">>},
    {<<"access-control-allow-methods">>, lists:join(<<", ">>, Methods ++ [<<"OPTIONS">>])},
    {<<"access-control-allow-headers">>, <<"*">>},
    {<<"access-control-max-age">>, <<"900">>}],
  set_headers(Headers, Req).

set_headers(Headers, Req) ->
  ReqWithHeaders = lists:foldl(
    fun({Header, Value}, ReqIn) ->
      cowboy_req:set_resp_header(Header, Value, ReqIn)
    end, Req, Headers),
  {ok, ReqWithHeaders}.

--------------------------------------------------------------------------------
/apps/faxe/src/web/rest_audit_server.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2020,
%%% @doc
%%% @end
%%%-------------------------------------------------------------------
-module(rest_audit_server).

-behaviour(gen_server).

-export([start_link/0, audit/2]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
  code_change/3]).

-define(SERVER, ?MODULE).

-record(rest_audit_server_state, {}).

%%%===================================================================
%%% Spawning and gen_server implementation
%%%===================================================================
%% audit logging is currently disabled (the send to the server is commented out)
audit(_User, _Req) ->
  ok.
%%  ?SERVER ! {audit, User, Req}.

start_link() ->
  gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).

init([]) ->
  {ok, #rest_audit_server_state{}}.

handle_call(_Request, _From, State = #rest_audit_server_state{}) ->
  {reply, ok, State}.

handle_cast(_Request, State = #rest_audit_server_state{}) ->
  {noreply, State}.

handle_info({audit, User, _CowboyReq = #{path := Path, bindings := Bindings}}, State = #rest_audit_server_state{}) ->
  lager:notice("[AUDIT-LOG] Api-User: ~s || Path: ~s || Bindings: ~p", [User, Path, Bindings]),
  {noreply, State}.

terminate(_Reason, _State = #rest_audit_server_state{}) ->
  ok.

code_change(_OldVsn, State = #rest_audit_server_state{}, _Extra) ->
  {ok, State}.

%%%===================================================================
%%% Internal functions
%%%===================================================================

--------------------------------------------------------------------------------
/apps/faxe/src/web/rest_tags_handler.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author $author
%%% @copyright (C) $year, $company
%%% @doc
%%%
%%% @end
%%% Created : $fulldate
%%%-------------------------------------------------------------------
-module(rest_tags_handler).

-include("faxe.hrl").

%%
%% Cowboy callbacks
-export([
  init/2
  , allowed_methods/2, list_json/2, content_types_provided/2, is_authorized/2]).

%%
%% Additional callbacks
-export([
]).

-record(state, {mode}).

init(Req, [{op, Mode}]) ->
  {cowboy_rest, Req, #state{mode = Mode}}.

is_authorized(Req, State) ->
  rest_helper:is_authorized(Req, State).

%% read-only endpoint
allowed_methods(Req, State) ->
  Value = [<<"GET">>],
  {Value, Req, State}.

content_types_provided(Req, State) ->
  {[
    {{<<"application">>, <<"json">>, []}, list_json}
  ], Req, State}.


%% GET: all known tags as {"tags": [...]}
list_json(Req, State = #state{mode = _Mode}) ->
  Tags = faxe:get_all_tags(),
  Map = #{<<"tags">> => Tags},
  {jiffy:encode(Map), Req, State}.

--------------------------------------------------------------------------------
/apps/faxe/test/component_SUITE.erl:
--------------------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%% @author heyoka
%%% @copyright (C) 2019,
%%% @doc
%%%
%%% @end
%%% Created : 15. Jul 2019 15:06
%%%-------------------------------------------------------------------
-module(component_SUITE).
-author("heyoka").

-include_lib("common_test/include/ct.hrl").
%% API
-export([suite/0, all/0]).
%% tests
-export([test1/1, test2/1]).

suite() ->
  [{timetrap, {minutes, 2}}].


all() ->
  [test1, test2].

test1(_Config) ->
  1 = 1.

%% NOTE(review): divides by zero on purpose -- a deliberately failing test case
test2(_Config) ->
  A = 0,
  1 / A.
31 | 32 | 33 | -------------------------------------------------------------------------------- /apps/faxe/test/dfs/batch_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(8000ms) 3 | .jitter(3700ms) 4 | .type(point) 5 | 6 | |batch(5) 7 | .timeout(5750ms) 8 | 9 | |debug('warning') -------------------------------------------------------------------------------- /apps/faxe/test/dfs/mqtt_amqp_bridge_test.dfs: -------------------------------------------------------------------------------- 1 | def topic = 'some/crazy/topic/this/is' 2 | def user = 'rabbitmq-cluster-user' 3 | def exchange = 'x_root_fanout' 4 | def rk = 'some.crazy.topic.this.is' 5 | 6 | def in = 7 | |mqtt_subscribe() 8 | .host('10.102.1.102') 9 | .port(1883) 10 | .topic(topic) 11 | .ssl(false) 12 | |debug() 13 | in 14 | |amqp_publish() 15 | .host('some.amqp_host') % dev 16 | .user(user) 17 | .pass('asdf323232') % dev 18 | .routing_key(rk) 19 | .exchange(exchange) 20 | .ssl(false) 21 | 22 | in 23 | |amqp_publish() 24 | .host('some.other_amqp_host') %% preprod 25 | .user(user) 26 | .pass('adfafdwewef3') % preprod 27 | .routing_key(rk) 28 | .exchange(exchange) 29 | .ssl(false) 30 | 31 | in 32 | |amqp_publish() 33 | .host('15.45.48.1') % prod 34 | .user(user) 35 | .pass('dfwefwef8ePI78we') % prod 36 | .routing_key(rk) 37 | .exchange(exchange) 38 | .ssl(true) -------------------------------------------------------------------------------- /apps/faxe/test/dfs/script_expr_test.dfs: -------------------------------------------------------------------------------- 1 | %% new in version 0.14.0 2 | def topic = 'some/crazy/topic/this/is' 3 | def user = 'rabbitmq-cluster-user' 4 | def exchange = 'x_root_fanout' 5 | def rk = e: str_replace(topic, '/', '.') 6 | 7 | def in = 8 | |mqtt_subscribe() 9 | .host('10.102.1.102') 10 | .port(1883) 11 | .topic(topic) 12 | .ssl(false) 13 | |debug() 14 | in 15 | |amqp_publish() 16 | .host('some.amqp_host') % 
dev 17 | .user(user) 18 | .pass('asdf323232') % dev 19 | .routing_key(rk) 20 | .exchange(exchange) 21 | .ssl(false) 22 | 23 | in 24 | |amqp_publish() 25 | .host('some.other_amqp_host') %% preprod 26 | .user(user) 27 | .pass('adfafdwewef3') % preprod 28 | .routing_key(rk) 29 | .exchange(exchange) 30 | .ssl(false) 31 | 32 | in 33 | |amqp_publish() 34 | .host('15.45.48.1') % prod 35 | .user(user) 36 | .pass('dfwefwef8ePI78we') % prod 37 | .routing_key(rk) 38 | .exchange(exchange) 39 | .ssl(true) -------------------------------------------------------------------------------- /apps/faxe/test/dfs/unknown_options_test.dfs: -------------------------------------------------------------------------------- 1 | %% send alarm, if the mean exceeds a certain threshold 2 | 3 | def mqtt_broker = '10.14.204.3' 4 | def alarm_topic = 'ttopic/alarm/energy_max' 5 | 6 | def point = 7 | |json_emitter() 8 | .every(1s) 9 | .json('{"condition": {"id": 0, "name": "OK"}, "condition_reason": "", "predicted_maintenance_time": 1584246411783, 10 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]}', 11 | '{"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 12 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} ', 13 | ' {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 14 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} ') 15 | 16 | 17 | |debug() 18 | 19 | point 20 | |batch(10).timeout(3s) 21 | 22 | |avg() 23 | .field('val') 24 | .as('avg') 25 | 26 | |debug() 27 | .ls_mem('avg10') 28 | .ls_mem_field('avg') 29 | 30 | point 31 | |where() 32 | .lambda(lambda: "val" > mem('avg10')) 33 | 34 | |default() 35 | .fields('message') 36 | .field_values('val is > avg10') 37 | 38 | |debug() 
-------------------------------------------------------------------------------- /apps/faxe/test/faxe.spec: -------------------------------------------------------------------------------- 1 | {suites,".",all}. -------------------------------------------------------------------------------- /apps/faxe/test/faxe_lambdalib_tests.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2019, 4 | %%% @doc 5 | %%% 6 | %%% @end 7 | %%% Created : 09. Sep 2019 13:50 8 | %%%------------------------------------------------------------------- 9 | -module(faxe_lambdalib_tests). 10 | -author("heyoka"). 11 | 12 | %% API 13 | -include("faxe.hrl"). 14 | -ifdef(TEST). 15 | -compile(nowarn_export_all). 16 | -compile([export_all]). 17 | -include_lib("eunit/include/eunit.hrl"). 18 | 19 | round_float_test() -> 20 | ?assertEqual(125.456, faxe_lambda_lib:round_float(125.455679, 3)). 21 | 22 | max_test() -> 23 | ?assertEqual(12, faxe_lambda_lib:max([1,12,3,6.8])). 24 | 25 | min_test() -> 26 | ?assertEqual(1, faxe_lambda_lib:min([1,12,3,6.8])). 27 | 28 | map_get_test() -> 29 | ?assertEqual(<<"yes">>, faxe_lambda_lib:map_get(<<"k2">>, test_map())). 30 | map_get_undefined_test() -> 31 | ?assertEqual(<<"undefined">>, faxe_lambda_lib:map_get(<<"k8">>, test_map())). 32 | 33 | test_map() -> #{<<"k1">> => 2134.23, <<"k2">> => <<"yes">>, <<"four">> => 5}. 34 | -endif. 35 | -------------------------------------------------------------------------------- /apps/faxe/test/faxe_time_tests.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2019, 4 | %%% @doc 5 | %%% 6 | %%% @end 7 | %%% Created : 09. 
Sep 2019 13:50 8 | %%%------------------------------------------------------------------- 9 | -module(faxe_time_tests). 10 | -author("heyoka"). 11 | 12 | %% API 13 | -include("faxe.hrl"). 14 | -ifdef(TEST). 15 | -compile(nowarn_export_all). 16 | -compile([export_all]). 17 | -include_lib("eunit/include/eunit.hrl"). 18 | 19 | duration_to_ms_basic_test() -> 20 | Dur = <<"3h">>, 21 | ?assertEqual(3*60*60*1000, faxe_time:duration_to_ms(Dur)). 22 | 23 | duration_to_ms_neg_test() -> 24 | Dur = <<"-3h">>, 25 | ?assertEqual(3*60*60*1000*-1, faxe_time:duration_to_ms(Dur)). 26 | 27 | duration_zero_test() -> 28 | Dur = <<"0m">>, 29 | ?assertEqual(0, faxe_time:duration_to_ms(Dur)). 30 | 31 | duration_big_test() -> 32 | Dur = <<"70000ms">>, 33 | ?assertEqual(70*1000, faxe_time:duration_to_ms(Dur)). 34 | 35 | not_valid_duration_test() -> 36 | Dur = <<"25mm">>, 37 | ?assertError(function_clause, faxe_time:duration_to_ms(Dur)). 38 | 39 | valid_duration_test() -> 40 | Dur = <<"1255ms">>, 41 | ?assertEqual(true, faxe_time:is_duration_string(Dur)). 42 | 43 | not_valid_duration_2_test() -> 44 | Dur = <<"1255ds">>, 45 | ?assertEqual(false, faxe_time:is_duration_string(Dur)). 46 | 47 | -endif. 48 | -------------------------------------------------------------------------------- /apps/faxe/test/jsn_tests.erl: -------------------------------------------------------------------------------- 1 | %%%------------------------------------------------------------------- 2 | %%% @author heyoka 3 | %%% @copyright (C) 2019, 4 | %%% @doc 5 | %%% 6 | %%% @end 7 | %%% Created : 09. Sep 2019 13:50 8 | %%%------------------------------------------------------------------- 9 | -module(jsn_tests). 10 | -author("heyoka"). 11 | 12 | %% API 13 | -include("faxe.hrl"). 14 | -ifdef(TEST). 15 | -compile(nowarn_export_all). 16 | -compile([export_all]). 17 | -include_lib("eunit/include/eunit.hrl"). 
18 | 19 | jsn_set_basic_test() -> 20 | M1 = #{<<"data">> => #{<<"bar">> =>[#{<<"first">> => 1}]}}, 21 | M2 = #{<<"second">> => 2}, 22 | ?assertEqual( 23 | #{<<"data">> => #{<<"bar">> =>[#{<<"first">> => 1}, #{<<"second">> => 2}]}}, 24 | jsn:set(flowdata:path(<<"data.bar[2]">>), M1, M2) 25 | ). 26 | -endif. 27 | -------------------------------------------------------------------------------- /config/vars.config: -------------------------------------------------------------------------------- 1 | {console_log_level, info}. 2 | {emit_log_level, warning}. 3 | {nodename, 'faxe@127.0.0.1'}. 4 | %%{mnesia_dir, "/opt/mnesia"}. 5 | {mnesia_dir, "./mnesia_data"}. 6 | {http_api_port, 8081}. 7 | {http_api_tls_enable, off}. 8 | {mqtt_host, "example.com"}. 9 | {amqp_host, "example.com"}. 10 | {crate_host, "example.com"}. 11 | {crate_http_host, "example.com"}. 12 | {crate_http_port, 4200}. 13 | {dfs_script_path, "/dfs/"}. 14 | {python_script_path, "/python/"}. 15 | {queue_base_dir, "/tmp"}. 16 | {rabbitmq_root_exchange, "x_root"}. 17 | {flow_auto_start, off}. 18 | {allow_anonymous, true}. 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /config/vars/balena.config: -------------------------------------------------------------------------------- 1 | {nodename, 'faxe@127.0.0.1'}. 2 | {sbwt, "none"}. 3 | {sbwtdcpu, "short"}. 4 | {sbwtdio, "short"}. 5 | {console_log_level, warning}. 6 | {mnesia_dir, "/data"}. 7 | {emit_log_level, warning}. 8 | {http_api_port, 8081}. 9 | {http_api_tls_enable, off}. 10 | {mqtt_host, "example.com"}. 11 | {amqp_host, "example.com"}. 12 | {crate_host, "example.com"}. 13 | {crate_http_host, "example.com"}. 14 | {crate_http_port, 4200}. 15 | {dfs_script_path, "/dfs/"}. 16 | {python_script_path, "/data/python"}. 17 | {queue_base_dir, "/data/esq"}. 18 | {rabbitmq_root_exchange, "x_root"}. 19 | {flow_auto_start, off}. 20 | {allow_anonymous, false}. 
-------------------------------------------------------------------------------- /config/vars/dev1.config: -------------------------------------------------------------------------------- 1 | {sbwt, "medium"}. 2 | {sbwtdcpu, "short"}. 3 | {sbwtdio, "short"}. 4 | {nodename, 'faxe@127.0.0.1'}. 5 | {mqtt_host, "10.14.204.20"}. 6 | {amqp_host, "10.14.204.28"}. 7 | {crate_host, "10.14.204.10"}. 8 | {crate_http_host, "10.14.204.10"}. 9 | {crate_http_port, 4200}. 10 | {dfs_script_path, "/home/heyoka/workspace/faxe/dfs/"}. 11 | {python_script_path, "/home/heyoka/workspace/faxe/python/"}. 12 | {rabbitmq_root_exchange, "x_lm_fanout"}. 13 | {flow_auto_start, off}. 14 | 15 | -------------------------------------------------------------------------------- /config/vars/dev2.config: -------------------------------------------------------------------------------- 1 | {nodename, 'faxe2@127.0.0.1'}. 2 | {sbwt, "medium"}. 3 | {sbwtdcpu, "short"}. 4 | {sbwtdio, "short"}. 5 | {mqtt_host, "10.14.204.20"}. 6 | {amqp_host, "10.14.204.28"}. 7 | {crate_host, "10.14.204.10"}. 8 | {crate_http_host, "10.14.204.10"}. 9 | {crate_http_port, 4200}. 10 | {dfs_script_path, "/home/heyoka/workspace/faxe/dfs/"}. 11 | {python_script_path, "/home/heyoka/workspace/faxe/python/"}. 12 | {rabbitmq_root_exchange, "x_lm_fanout"}. 13 | {flow_auto_start, off}. 14 | {mnesia_dir, "./mnesia_data"}. 15 | {http_api_port, 8082}. 16 | {http_api_tls_enable, off}. -------------------------------------------------------------------------------- /config/vars/dev3.config: -------------------------------------------------------------------------------- 1 | {nodename, 'faxe3@127.0.0.1'}. 2 | {sbwt, "medium"}. 3 | {sbwtdcpu, "short"}. 4 | {sbwtdio, "short"}. 5 | {mqtt_host, "10.14.204.20"}. 6 | {amqp_host, "10.14.204.28"}. 7 | {crate_host, "10.14.204.10"}. 8 | {crate_http_host, "10.14.204.10"}. 9 | {crate_http_port, 4200}. 10 | {dfs_script_path, "/home/heyoka/workspace/faxe/dfs/"}. 
11 | {python_script_path, "/home/heyoka/workspace/faxe/python/"}. 12 | {rabbitmq_root_exchange, "x_lm_fanout"}. 13 | {flow_auto_start, off}. 14 | {mnesia_dir, "./mnesia_data"}. 15 | {http_api_port, 8083}. 16 | {http_api_tls_enable, off}. -------------------------------------------------------------------------------- /config/vars/dev4.config: -------------------------------------------------------------------------------- 1 | {nodename, "faxe4"}. -------------------------------------------------------------------------------- /config/vars/dev5.config: -------------------------------------------------------------------------------- 1 | {nodename, "faxe5"}. -------------------------------------------------------------------------------- /config/vars/k8s.config: -------------------------------------------------------------------------------- 1 | {nodename, "faxe"}. -------------------------------------------------------------------------------- /config/vars/prod.config: -------------------------------------------------------------------------------- 1 | {nodename, 'faxe@127.0.0.1'}. 2 | {sbwt, "medium"}. 3 | {sbwtdcpu, "short"}. 4 | {sbwtdio, "short"}. 5 | {console_log_level, warning}. 6 | {http_api_port, 8081}. 7 | {mqtt_host, "10.14.204.20"}. 8 | {amqp_host, "10.14.204.28"}. 9 | {crate_host, "10.14.204.10"}. 10 | {crate_http_host, "10.14.204.10"}. 11 | {crate_http_port, 4200}. 12 | {dfs_script_path, "./dfs/"}. 13 | {python_script_path, "./python/"}. 14 | {rabbitmq_root_exchange, "x_lm_fanout"}. 15 | {flow_auto_start, on}. 16 | {allow_anonymous, false}. 
-------------------------------------------------------------------------------- /dfs/aggregate/test.dfs: -------------------------------------------------------------------------------- 1 | %def target_topic = 'tgw/data/lenze/diagnosis' 2 | %def target_topic = 'tgw/data/lenze/lifediagnosis' 3 | 4 | def percent = 50 5 | 6 | def data = 7 | |json_emitter( 8 | '{"val1": 13, "val2": 3.453, "added": 1}', 9 | '{"val1": 16, "val2": 3.453, "added": 0}', 10 | '{"val1": 7, "val2": 3.453}', 11 | '{"val1": 18, "val2": 3.453}', 12 | '{"val1": 24, "val2": 3.458, "added": 1}', 13 | '{"val1": 32, "val2": 3.453}', 14 | '{"val1": 11, "val2": 3.453,"added": 0}', 15 | '{"val1": 4, "val2": 3.453,"added": 1}' 16 | ) 17 | .select('batch') 18 | 19 | data 20 | |percentile() 21 | .fields('val1') 22 | .as('prec_{{percent}}_val1') 23 | .at(percent) 24 | 25 | |debug('info') 26 | 27 | 28 | data 29 | |aggregate() 30 | .functions( 31 | 'count', 32 | 'sum', 33 | 'avg', 34 | 'min', 35 | 'max', 36 | 'range', 37 | 'count_distinct', 38 | 'count_change', 39 | 'geometric_mean') 40 | .fields( 41 | 'added', 42 | 'added', 43 | 'val1', 44 | 'val1', 45 | 'val1', 46 | 'val2', 47 | 'added', 48 | 'added', 49 | 'val1') 50 | .as( 51 | 'agg.count_added', 52 | 'agg.sum_added', 53 | 'agg.avg_val1', 54 | 'agg.min_val1', 55 | 'agg.max_val1', 56 | 'agg.range_val2', 57 | 'agg.countd_added', 58 | 'agg.countc_added', 59 | 'agg.geommean_val1') 60 | .keep_tail(false) 61 | 62 | |debug() -------------------------------------------------------------------------------- /dfs/array_explode.dfs: -------------------------------------------------------------------------------- 1 | |json_emitter( 2 | '{"drive.motor.ap": [1,2,3,4,5,6,7,8,9], "torque": [6,7,8,9,1,2,3,4,5], "zip": [4,5,6,7,8,9,1,2,3]}' 3 | ) 4 | |debug('info') 5 | |eval(lambda:string("ts"), random_latin_string(12)).as('data.id', 'latin.string.id') 6 | |array_explode() 7 | .fields('drive*motor*ap', 'zip', 'zap', 'storque') 8 | .as('data.ex_drive', 'data.ex_zip', 
'data.ex_zap', 'data.ex_torque') 9 | %.keep('data', 'latin') 10 | |debug() -------------------------------------------------------------------------------- /dfs/debug/map_get.dfs: -------------------------------------------------------------------------------- 1 | def letter_map = '{"a": 1, "d": 2, "c": 3}' 2 | 3 | |json_emitter('{"a": "b", "c": 4}') 4 | .every(3s) 5 | 6 | |eval(lambda: map_get("a", letter_map, 'defaultvalue')).as('mapped') 7 | |debug() -------------------------------------------------------------------------------- /dfs/dev/mqtt_pub_pool.dfs: -------------------------------------------------------------------------------- 1 | def data = 2 | |json_emitter('{"hello": "you", "how": {"are": "you"}, "int": 1}') 3 | .every(500ms) 4 | 5 | def data1 = 6 | |json_emitter('{"hello": "you", "how": {"are": "you"}, "int": 2}') 7 | .every(800ms) 8 | 9 | def data2 = 10 | |json_emitter('{"hello": "you", "how": {"are": "you"}, "int": 3}') 11 | .every(2s) 12 | 13 | data1 14 | |mqtt_publish() 15 | .topic('tgw/data/alex_test/mqtt_pub_pool1') 16 | .host('10.14.204.20') 17 | .port(1883) 18 | .use_pool(true) 19 | 20 | data 21 | |mqtt_publish() 22 | .topic('tgw/data/alex_test/mqtt_pub_pool2') 23 | .host('10.14.204.20') 24 | .port(1883) 25 | .use_pool(true) 26 | 27 | data1 28 | |mqtt_publish() 29 | .topic('tgw/data/alex_test/mqtt_pub_pool3') 30 | .host('10.14.204.20') 31 | .port(1883) 32 | .use_pool(true) 33 | 34 | data 35 | |mqtt_publish() 36 | .topic('tgw/data/alex_test/mqtt_pub_pool4') 37 | .host('10.14.204.20') 38 | .port(1883) 39 | .use_pool(true) 40 | 41 | data2 42 | |mqtt_publish() 43 | .topic('tgw/data/alex_test/mqtt_pub_pool5') 44 | .host('10.14.204.20') 45 | .port(1883) 46 | .use_pool(true) 47 | .qos(1) 48 | .retained(true) 49 | 50 | data 51 | |mqtt_publish() 52 | .topic('tgw/data/alex_test/mqtt_pub_pool6') 53 | .host('10.14.204.20') 54 | .port(1883) 55 | .use_pool(true) 56 | 57 | data1 58 | |mqtt_publish() 59 | .topic('tgw/data/alex_test/mqtt_pub_pool6') 60 | 
.host('10.14.204.20') 61 | .port(1883) 62 | .use_pool(true) 63 | 64 | data 65 | |mqtt_publish() 66 | .topic('tgw/data/alex_test/mqtt_pub_pool6') 67 | .host('10.14.204.20') 68 | .port(1883) 69 | .use_pool(true) 70 | 71 | data 72 | |mqtt_publish() 73 | .topic('tgw/data/alex_test/mqtt_pub_pool6') 74 | .host('10.14.204.20') 75 | .port(1883) 76 | .use_pool(true) 77 | 78 | data 79 | |mqtt_publish() 80 | .topic('tgw/data/alex_test/mqtt_pub_pool6') 81 | .host('10.14.204.20') 82 | .port(1883) 83 | .use_pool(true) 84 | 85 | data 86 | |mqtt_publish() 87 | .topic('tgw/data/alex_test/mqtt_pub_pool6') 88 | .host('10.14.204.20') 89 | .port(1883) 90 | .use_pool(true) 91 | 92 | data 93 | |mqtt_publish() 94 | .topic('tgw/data/alex_test/mqtt_pub_pool6') 95 | .host('10.14.204.20') 96 | .port(1883) 97 | .use_pool(true) -------------------------------------------------------------------------------- /dfs/dev/mqtt_pub_pool2.dfs: -------------------------------------------------------------------------------- 1 | def data = 2 | |json_emitter('{"hello": "you", "how": {"are": "you"}, "int": 1}') 3 | .every(1s) 4 | .align() 5 | 6 | 7 | data 8 | |mqtt_publish() 9 | .topic('tgw/data/alex_test/mqtt_pub_pool1') 10 | .host('10.14.204.20') 11 | .port(1883) 12 | .use_pool(true) 13 | 14 | 15 | def data1 = 16 | |json_emitter('{"hello": "you", "how": {"are": "you"}, "int": 1}') 17 | .every(500ms) 18 | .align() 19 | 20 | data1 21 | |mqtt_publish() 22 | .topic('tgw/data/alex_test/mqtt_pub_pool2-1') 23 | .host('10.14.204.20') 24 | .port(1883) 25 | .use_pool(false) 26 | .retained(true) 27 | -------------------------------------------------------------------------------- /dfs/dev/path_split.dfs: -------------------------------------------------------------------------------- 1 | def path_prefix = 'module' 2 | 3 | %% mock s7_read 4 | |json_emitter( 5 | '{"module32201" : {"what" : "ever"}, "module2203": {"this" : 555}, "e14069" : 14}' 6 | ) 7 | 8 | %% split data-points by 'module' and keep the root path name 
as the field 'module' 9 | |path_split() 10 | .include_as(path_prefix) 11 | 12 | %% get rid of the prefix 'module' so that instead of ie: 'module32201' we get '32201' 13 | |eval(lambda: str_slice("{{path_prefix}}", str_length(path_prefix))).as(path_prefix) 14 | -------------------------------------------------------------------------------- /dfs/dev/python2.dfs: -------------------------------------------------------------------------------- 1 | |json_emitter( 2 | %'{}' 3 | '{"field1" : 1, "field2": "2", "field3": {"deeper": {"list": [1,2,{"inlist": 999}, "33", "44", "55", "yes-no-but-yes"]}}}', 4 | '{"field1" : 21, "field22": "2", "field3": {"deeper": {"list": [3,4,{"inlist": 444}]}}}', 5 | '{"field1" : 14, "field2": "2", "field3": {"deeper": {"list": [1,2,{"inlist": 999}]}}}', 6 | '{"field1" : 99, "field22": "2", "field3": {"deeper": {"list": [3,4,{"inlist": 444}]}}}' 7 | ) 8 | .every(1s) 9 | .as('pyth') 10 | .select('seq') 11 | 12 | @batch_start() 13 | %.field('pyth.field3.deeper.list[2]') 14 | 15 | %@data_demo() 16 | 17 | |debug() -------------------------------------------------------------------------------- /dfs/dev/python_double.dfs: -------------------------------------------------------------------------------- 1 | |json_emitter( 2 | '{"field1" : 1, "field2": "2"}', 3 | '{"field1" : 2, "field2": "3"}', 4 | '{"field1" : 3, "field2": "4"}' 5 | ) 6 | .every(500ms) 7 | .as('data') 8 | .select('batch') 9 | 10 | @double() 11 | .field('data.field1') 12 | .as('data.double.field1') 13 | 14 | |keep('data.double.field1') 15 | |debug() 16 | 17 | -------------------------------------------------------------------------------- /dfs/dev/python_time.dfs: -------------------------------------------------------------------------------- 1 | |json_emitter('{"hallo": "du"}').every(3s) 2 | 3 | @python_time() 4 | 5 | |eval(lambda: str_concat( 6 | string(abs("python.time" - "ts")) , 7 | 'ms') 8 | ) 9 | .as('time_diff') 10 | 11 | |debug() 12 | 13 | 
-------------------------------------------------------------------------------- /dfs/other/alarm2.dfs: -------------------------------------------------------------------------------- 1 | %% send alarm, if the the mean exceeds a certain threshold 2 | 3 | def mqtt_broker = '10.14.204.3' 4 | def alarm_topic = 'ttopic/alarm/energy_max' 5 | 6 | def point = 7 | |json_emitter() 8 | .every(1s) 9 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "", "predicted_maintenance_time": 1584246411783, 10 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 11 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 12 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 13 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 14 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 15 | 16 | 17 | |debug() 18 | 19 | point 20 | |batch(10).timeout(3s) 21 | 22 | |avg() 23 | .field('val') 24 | .as('avg') 25 | 26 | |debug() 27 | 28 | point 29 | |where() 30 | .lambda(lambda: "val" > ls_mem('avg10')) 31 | 32 | |default() 33 | .fields('message') 34 | .field_values('val is > avg10') 35 | 36 | |debug() -------------------------------------------------------------------------------- /dfs/other/batch_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(8000ms) 3 | .jitter(3700ms) 4 | .type(point) 5 | 6 | |batch(5) 7 | .timeout(5750ms) 8 | 9 | |debug('warning') -------------------------------------------------------------------------------- /dfs/other/case_test.dfs: -------------------------------------------------------------------------------- 1 | %% send alarm, if the the mean exceeds a certain threshold 2 | 3 | def 
postfix = '---hehehe' 4 | 5 | def point = 6 | |json_emitter() 7 | .every(1s) 8 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "", "predicted_maintenance_time": 1584246411783, 9 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 10 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 11 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 12 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 13 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 14 | 15 | 16 | |case( 17 | lambda: "data.condition.name" == 'Alright', 18 | lambda: "data.condition.name" == 'Warning', 19 | lambda: "data.condition.name" == 'Error' 20 | ) 21 | .values('OOOOOKKKKK', 'Waaaaarning', 'EErroorrr') 22 | .as('data.condition.alt_name') 23 | .default('Nothing matched!!!') 24 | 25 | |keep('data.condition.alt_name') 26 | 27 | |debug() -------------------------------------------------------------------------------- /dfs/other/case_test2.dfs: -------------------------------------------------------------------------------- 1 | %% send alarm, if the the mean exceeds a certain threshold 2 | 3 | def postfix = '---hehehe' 4 | 5 | def point = 6 | |json_emitter() 7 | .every(1s) 8 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "", "predicted_maintenance_time": 1584246411783, 9 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 10 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 11 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 12 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": 
"something went really wrong!", "predicted_maintenance_time": 1582246411783, 13 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 14 | 15 | 16 | |case( 17 | lambda: "data.condition.name" == 'OK', 18 | lambda: "data.condition.name" == 'Warning', 19 | lambda: "data.condition.name" == 'Error' 20 | ) 21 | .values( 22 | <<<{"cond": "Everything OK!"}>>>, 23 | <<<{"cond": "Oh, oh, a Warning!"}>>>, 24 | <<<{"cond": "Damn, Error!"}>>> 25 | ) 26 | .json() 27 | .as('data') 28 | .default(<<<{"cond": "Nothing matched!!!"}>>>) 29 | 30 | %|keep('data.condition.alt_name') 31 | 32 | |debug() -------------------------------------------------------------------------------- /dfs/other/case_test3.dfs: -------------------------------------------------------------------------------- 1 | %% send alarm, if the the mean exceeds a certain threshold 2 | 3 | def postfix = '---hehehe' 4 | 5 | def point = 6 | |json_emitter() 7 | .every(1s) 8 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "", "predicted_maintenance_time": 1584246411783, 9 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 10 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 11 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 12 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 13 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>, 14 | <<< {"condition": {"id": 3, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 15 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 16 | 17 | |debug('info') 18 | |case( 19 | lambda: "data.condition.id" == 0, 20 | lambda: 
"data.condition.id" == 1, 21 | lambda: "data.condition.id" == 2 22 | ) 23 | .values( 24 | <<<{"cond": "0"}>>>, 25 | <<<{"cond": "1"}>>>, 26 | <<<{"cond": "2"}>>> 27 | ) 28 | .json() 29 | .as('data') 30 | .default(<<<{"cond": "Nothing matched!!!"}>>>) 31 | 32 | %|keep('data.condition.alt_name') 33 | 34 | |debug() -------------------------------------------------------------------------------- /dfs/other/change_detect_test.dfs: -------------------------------------------------------------------------------- 1 | def emit_every = 2s 2 | def emit_every_jitter = 1s 3 | def debug_type = 'info' 4 | 5 | |json_emitter() 6 | .every(emit_every) 7 | .jitter(emit_every_jitter) 8 | .json( 9 | <<<{"condition": {"state": "IDLE"}, "topic": "in2"} >>>, 10 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 11 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 12 | <<<{"condition": {"state": "IDLE"}, "topic": "in2"} >>>, 13 | <<<{"condition": {"state": "IDLE"}, "topic": "in3"} >>>, 14 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 15 | <<<{"condition": {"state": "IDLER"}, "topic": "in1"} >>>, 16 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 17 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>> 18 | ) 19 | 20 | |debug(debug_type) 21 | |change_detect('data.condition.state', 'data.topic') 22 | |debug() -------------------------------------------------------------------------------- /dfs/other/change_detect_timeout_test.dfs: -------------------------------------------------------------------------------- 1 | def emit_every = 2s 2 | def debug_type = 'info' 3 | 4 | |json_emitter() 5 | .every(emit_every) 6 | .json( 7 | <<<{"condition": {"state": "IDLE"}, "topic": "in2"} >>> 8 | ) 9 | 10 | |debug(debug_type) 11 | |change_detect() 12 | .timeout(30s) 13 | |debug() -------------------------------------------------------------------------------- /dfs/other/collect_unique.dfs: 
-------------------------------------------------------------------------------- 1 | %% collect values from data-streams 2 | 3 | 4 | |json_emitter() 5 | .every(3s) 6 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "OK-Reason", "predicted_maintenance_time": 1584246411783, 7 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 8 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 9 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 10 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad suxxer", "predicted_maintenance_time": 1583246411783, 11 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 12 | <<< {"condition": {"idx": 1, "name": "Warning"}, "condition_reason": "bad suxxer", "predicted_maintenance_time": 1583246411783, 13 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 14 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 15 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 16 | 17 | 18 | %|debug() 19 | 20 | |collect_unique('data.condition.id') 21 | .keep('data.condition_reason', 'data.condition.name', 'data.condition.id') 22 | .keep_as('condition_reason', 'condition_name', 'condition_id') 23 | .min_vals(2) 24 | % .max_age(7s) 25 | 26 | |debug('warning') 27 | -------------------------------------------------------------------------------- /dfs/other/combine.dfs: -------------------------------------------------------------------------------- 1 | def in1 = 2 | |value_emitter() 3 | .every(500ms) 4 | .type(point) 5 | .fields(val) 6 | 7 | def in2 = 8 | |value_emitter() 9 | .every(4s) 10 | .type(point) 11 | .fields('val2', 'val3') 
12 | 13 | def in3 = 14 | |value_emitter() 15 | .every(3s) 16 | .type(point) 17 | .format(ejson) 18 | 19 | def combined = 20 | in1 21 | |combine(in2) 22 | .fields('val2', 'val3') 23 | .prefix('comb') 24 | .prefix_delimiter('-') 25 | 26 | combined 27 | |combine(in3) 28 | .fields('val.val') 29 | .aliases('val3') 30 | 31 | |debug() -------------------------------------------------------------------------------- /dfs/other/combine1.dfs: -------------------------------------------------------------------------------- 1 | %% demonstrates the use of the combine node 2 | 3 | 4 | %% we setup 2 random-value streams 5 | 6 | def in1 = 7 | |value_emitter() 8 | .every(50ms) 9 | .type(point) 10 | .fields('vall') 11 | 12 | def in2 = 13 | |value_emitter() 14 | .every(2s) 15 | .type(point) 16 | .fields('val2', 'val3') 17 | 18 | %%% combine the 2 streams, so that we get a value every 50ms (from in1) but only after initially 2s passed (in2) 19 | %%% the values of the resulting fields 'val2' and 'val3' will change every 2s 20 | %%% while the value of 'vall' will change every 50ms 21 | in1 22 | |combine(in2) 23 | .fields('val2', 'val3') 24 | %.prefix('comb') 25 | %.prefix_delimiter('-') 26 | 27 | |debug() -------------------------------------------------------------------------------- /dfs/other/combine_merge.dfs: -------------------------------------------------------------------------------- 1 | def v1 = 2 | |json_emitter() 3 | .every(1s) 4 | .json(<<< {"condition": {"id": 0, "name": "OK", "sub_cond": 5 | [{"value": 33}]}, "condition_reason": "", 6 | "predicted_maintenance_time": 1584246411783, 7 | "vac_on_without_contact": [1.2, 2.5, 4.33]} >>>) 8 | 9 | def v2 = 10 | |json_emitter() 11 | .every(5s) 12 | .json(<<< {"condition": {"id1": 0, "name1": "OK", "sub_cond": 13 | [{"number": 44}]}, "condition_reason": "", 14 | "predicted_maintenance_time": 1584246411785, 15 | "vac_on_without_contact": [2.2, 2.5, 4.33], 16 | "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>) 17 | 18 | v1 19 | 
|combine(v2) 20 | .merge_field('data') 21 | 22 | |debug() 23 | 24 | 25 | -------------------------------------------------------------------------------- /dfs/other/cond_test1.dfs: -------------------------------------------------------------------------------- 1 | def timeout = 0s 2 | def condition_reason = 'timeout' 3 | 4 | def in1 = 5 | |json_emitter() 6 | .every(12s) 7 | .jitter(2s) 8 | .json(<<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>) 9 | 10 | 11 | def in2 = 12 | |json_emitter() 13 | .every(15s) 14 | .jitter(32s) 15 | .json(<<<{"Function": "interface::WInterface::progress", "topic": "in2"}>>>) 16 | 17 | def in3 = 18 | |json_emitter() 19 | .every(4s) 20 | .jitter(13s) 21 | .json(<<<{"Function": "interface::WInterface::process", "topic": "in3"}>>>) 22 | 23 | 24 | in1 25 | |state_sequence(in2, in3) 26 | .states( 27 | lambda: "data.topic" == 'in1', 28 | lambda: "data.topic" == 'in2', 29 | lambda: "data.topic" == 'in3' 30 | ) 31 | .within(25s, 20s) 32 | %.strict() 33 | 34 | |debug('warning') 35 | 36 | def ignore = 37 | |where(lambda: "data.Function" == 'interface::WInterface::progress' AND "data.condition.state" == 'IDLE') 38 | |debug() 39 | |deadman(60m) 40 | .fields('combined.condition.name', 'combined.condition_reason', 'combined.condition.id') 41 | .field_values('Error', condition_reason, 2) 42 | 43 | |where(lambda: "combined.condititon_reason" == condition_reason) 44 | 45 | 46 | -------------------------------------------------------------------------------- /dfs/other/conditional_delete.dfs: -------------------------------------------------------------------------------- 1 | def emit_every = 2s 2 | def emit_every_jitter = 1s 3 | def debug_type = 'info' 4 | 5 | |json_emitter() 6 | .every(emit_every) 7 | .jitter(emit_every_jitter) 8 | .json( 9 | <<<{"condition": {"state": "IDLE"}, "topic": "in2"} >>>, 10 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 11 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 12 | <<<{"condition": 
{"state": "IDLE"}, "topic": "in2"} >>>, 13 | <<<{"condition": {"state": "IDLE"}, "topic": "in3"} >>>, 14 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 15 | <<<{"condition": {"state": "IDLER"}, "topic": "in1"} >>>, 16 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 17 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>> 18 | ) 19 | .as('data') 20 | 21 | |debug(debug_type) 22 | 23 | |delete() 24 | .fields('data.condition.state') 25 | .where(lambda: "data.topic" == 'in2' OR "data.topic" == 'in3') 26 | 27 | |debug() -------------------------------------------------------------------------------- /dfs/other/conf_test.dfs: -------------------------------------------------------------------------------- 1 | |amqp_consume() 2 | .routing_key('rkba') 3 | .queue('qba') 4 | .exchange('xba') -------------------------------------------------------------------------------- /dfs/other/deadman_2.dfs: -------------------------------------------------------------------------------- 1 | def json = 2 | |json_emitter() 3 | .every(5s) 4 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": ""} >>>, 5 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer"} >>>, 6 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!"} >>>) 7 | 8 | 9 | def start = 10 | json 11 | |where(lambda: "data.condition.id" == 0) 12 | 13 | |debug('warning') 14 | 15 | def continue = 16 | json 17 | |where(lambda: "data.condition.id" == 1) 18 | 19 | start 20 | |combine(continue) 21 | .prefix('combined') 22 | 23 | |deadman(10s) 24 | .trigger_on_value() 25 | .fields('HI') 26 | .field_values('ALLLLEEEERRRRRRTTTTTT') 27 | 28 | |debug() -------------------------------------------------------------------------------- /dfs/other/deadman_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(5s) 3 | .type(point) 4 | 5 | |deadman(3s) 6 | 
.repeat_last() 7 | .trigger_on_value() 8 | 9 | |eval(lambda: to_iso8601("ts" - (60000 * 5))) 10 | .as('datetime') 11 | 12 | |debug() 13 | 14 | %|email() 15 | %.to(<<>>,<<>>) 16 | %.subject('Alert #ex3 EnergyData') 17 | %.body(<<>>) -------------------------------------------------------------------------------- /dfs/other/deadman_test_repeat.dfs: -------------------------------------------------------------------------------- 1 | def republish_timeout = 2s 2 | 3 | |value_emitter() 4 | .every(10s) 5 | .align() 6 | 7 | |deadman(republish_timeout) 8 | .repeat_last(true) 9 | .trigger_on_value(true) 10 | .repeat_interval(republish_timeout) 11 | 12 | |debug() -------------------------------------------------------------------------------- /dfs/other/delete_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(5s) 3 | .type(point) 4 | 5 | |delete() 6 | .fields('harr') 7 | 8 | |debug() -------------------------------------------------------------------------------- /dfs/other/did_you_mean.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(8000ms) 3 | .jitter(3700ms) 4 | .type(point) 5 | 6 | |batch(5) 7 | .timeou(5750ms) 8 | 9 | |debug('warning') -------------------------------------------------------------------------------- /dfs/other/email_param.dfs: -------------------------------------------------------------------------------- 1 | |json_emitter( 2 | '{"sub_topic": "A-140-2", "body_string": "schallalallala"}', 3 | '{"sub_topic": "A-183-1", "body_string": "trollolololo"}', 4 | '{"sub_topic": "A-111-2", "body_string": "trallalalala"}' 5 | 6 | ) 7 | .every(5s) 8 | .select('rand') 9 | |eval(lambda: str_concat('Doepfer-Module ', "sub_topics")).as('email_subject') 10 | |email() 11 | .to('amini@kar12.com') 12 | .subject_field('email_subject') 13 | .body('a {{"body_string"}}') 14 | 
.template('/home/heyoka/workspace/faxe/_build/dev1/rel/faxe/templates/email_template.html') -------------------------------------------------------------------------------- /dfs/other/extract.dfs: -------------------------------------------------------------------------------- 1 | |mqtt_subscribe() 2 | .host('10.10.1.102') 3 | .topic('ttgw/data/mib/#') 4 | |keep() 5 | .fields('topic') 6 | 7 | |debug() 8 | -------------------------------------------------------------------------------- /dfs/other/grip_calib_images.dfs: -------------------------------------------------------------------------------- 1 | def topic = 'ttgw/grip_images/#' 2 | def mqtt_host = '10.10.1.102' 3 | def topic_base = 'ttgw/data/grip_images/' 4 | 5 | def image = 6 | |mqtt_subscribe() 7 | .host(mqtt_host) 8 | .port(1883) 9 | .topic(topic) 10 | 11 | |where() 12 | .lambda(lambda: "data.image_name" == 'calibration_image') 13 | 14 | |mqtt_publish() 15 | .host(mqtt_host) 16 | .topic_lambda(lambda: str_concat( [topic_base, "data.device", '/calibration_image/' , "data.location"]) ) 17 | %% important to send retained 18 | .retained() 19 | -------------------------------------------------------------------------------- /dfs/other/helix_convtrack_crate.dfs: -------------------------------------------------------------------------------- 1 | def host = '10.14.204.3' 2 | def port = 5672 3 | def routing_key = 'ttgw.conveyor_tracking.petzold_test' 4 | 5 | |mqtt_subscribe() 6 | .topic('ttgw/conveyor_tracking/petzoldstr') 7 | 8 | %def parser = parser_conv_tracking_v1 9 | %def v1 = 10 | % |tcp_recv_line() 11 | % .ip('127.0.0.1') 12 | % .port(1111) 13 | % .parser(parser) 14 | % .min_length(61) 15 | % .extract() 16 | 17 | %|log() 18 | %.file('helix_conv_track.txt') 19 | 20 | |http_post_crate() 21 | .host(<<< http://devat-cw-ds.tgwdev.internal >>>) 22 | .port(4201) 23 | .database('doc') 24 | .table('conveyortracking_parted') 25 | .db_fields('id', 'df', 'vs', 'data_obj') 26 | .faxe_fields('id', 'df', 'vs', 'data') 27 
| 28 | 29 | 30 | -------------------------------------------------------------------------------- /dfs/other/http.dfs: -------------------------------------------------------------------------------- 1 | 2 | |value_emitter() 3 | .every(2s) 4 | .type(point) 5 | 6 | |debug() 7 | 8 | |http_post() 9 | .host('127.0.0.1') 10 | .port(8081) 11 | %.tls() -------------------------------------------------------------------------------- /dfs/other/https_get_test.dfs: -------------------------------------------------------------------------------- 1 | %% triggers the 'http_get' node 2 | |value_emitter() 3 | .every(5s) 4 | 5 | %% get stats from this faxe s http endpoint 6 | |http_get() 7 | .host('localhost') 8 | .port(8081) 9 | .every(5s) 10 | .path('/v1/stats') 11 | 12 | |debug() -------------------------------------------------------------------------------- /dfs/other/influx_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(100ms) 3 | .type(point) 4 | 5 | |set() 6 | .tags('mytag').tag_values('mytags_value 123546') 7 | 8 | |batch(20) 9 | .timeout(7s) 10 | 11 | |debug() 12 | 13 | |influx_out() 14 | .host('127.0.0.1') 15 | .port(8086) 16 | .measurement('m1') 17 | .database('mydb') 18 | %.retpol('myretpol') -------------------------------------------------------------------------------- /dfs/other/join.dfs: -------------------------------------------------------------------------------- 1 | 2 | def v1 = 3 | |value_emitter() 4 | .every(3s) 5 | .type(point) 6 | .align() 7 | 8 | def v2 = 9 | |value_emitter() 10 | .every(5s) 11 | .type(point) 12 | .align() 13 | 14 | v1 15 | |join(v2) 16 | .merge_field('val') 17 | .tolerance(3s) 18 | .missing_timeout(3s) 19 | .fill(none) 20 | 21 | v1 22 | %|join(v2) 23 | %.prefix('v1.joined', 'v2.joined') 24 | %.tolerance(3s) 25 | %.missing_timeout(3s) 26 | %.fill(none) 27 | 28 | |debug() 29 | 30 | -------------------------------------------------------------------------------- 
/dfs/other/join2.dfs: -------------------------------------------------------------------------------- 1 | 2 | def v1 = 3 | |value_emitter() 4 | .every(1s) 5 | .type('point') 6 | .jitter(300ms) 7 | 8 | def v2 = 9 | |value_emitter() 10 | .every(700ms) 11 | .type('point') 12 | .jitter(200ms) 13 | 14 | def v3 = 15 | |value_emitter() 16 | .every(900ms) 17 | .type('point') 18 | .jitter(100ms) 19 | 20 | v1 21 | |join2(v2, v3) 22 | .prefix('v1_', 'v2_', 'v3_') 23 | .tolerance(1s) 24 | .missing_timeout(300ms) 25 | 26 | |debug('notice') 27 | .message('JOIN2') 28 | 29 | %v1 30 | % |join(v2, v3) 31 | % .prefix('v1_', 'v2_', 'v3_') 32 | % .tolerance(1s) 33 | % .missing_timeout(300ms) 34 | 35 | % |debug('notice') 36 | % .message('JOIN') 37 | 38 | -------------------------------------------------------------------------------- /dfs/other/jpath.dfs: -------------------------------------------------------------------------------- 1 | def every = 100ms 2 | 3 | def in = 4 | |value_emitter() 5 | .every(every) 6 | .type(point) 7 | .format(ejson) 8 | 9 | |win_time_q() 10 | .every(5s) 11 | .period(10s) 12 | .fill_period() 13 | 14 | in 15 | |mean() 16 | .field('val.val') 17 | .as('val') 18 | 19 | |eval() 20 | .lambdas(lambda: "val" * 2) 21 | .as('val') 22 | 23 | |default() 24 | .fields('var.val2') 25 | .field_values(5) 26 | 27 | |eval() 28 | .lambdas(lambda: "var.val2" * 2) 29 | .as('var.val2') 30 | |debug() 31 | 32 | |default() 33 | .fields('wuzz.wazz.wizz') 34 | .field_values('tadaaa!!') 35 | 36 | |delete() 37 | .fields('var.val2') 38 | 39 | |default() 40 | .fields('var.val2') 41 | .field_values('deleted') 42 | 43 | |debug() 44 | 45 | |rename() 46 | .fields('var.val2') 47 | .as_fields('var.vallee') 48 | 49 | |debug() -------------------------------------------------------------------------------- /dfs/other/json_emitter.dfs: -------------------------------------------------------------------------------- 1 | def topic = 'ttgw/data/what/ever' 2 | def message = <<< {"message": "test"} 
>>> 3 | 4 | def host = '10.14.204.3' 5 | 6 | |json_emitter() 7 | .every(2s) 8 | .json(message) 9 | 10 | |mqtt_publish() 11 | .host(host) 12 | .topic(topic) 13 | %.safe() 14 | 15 | |debug() 16 | -------------------------------------------------------------------------------- /dfs/other/json_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .type(point) 3 | .every(5s) 4 | 5 | |eval() 6 | .lambdas(lambda: random_real(4)) 7 | .as('data.mt[1].ccRdy') 8 | 9 | |default() 10 | .fields('data.mt[1].moduleNo') 11 | .field_values(1101) 12 | 13 | |debug() -------------------------------------------------------------------------------- /dfs/other/lambda_list_test.dfs: -------------------------------------------------------------------------------- 1 | |json_emitter() 2 | .every(1s) 3 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "", "predicted_maintenance_time": 1584246411783, 4 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 5 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 6 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 7 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 8 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 9 | 10 | |eval() 11 | .lambdas( 12 | lambda: str_concat( 13 | ['The reason ', 'is ', "data.condition_reason", ' because of ', string("data.vac_on_with_contact[2]")] 14 | ), 15 | lambda: "data.vac_on_with_contact[2]" * 1 16 | ) 17 | .as('data.concat_string', 'data.vac_selection') 18 | 19 | .keep() 20 | .fields('data.concat_string') 21 | 22 | |debug() 23 | 24 | -------------------------------------------------------------------------------- 
/dfs/other/lambda_op.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(5s) 3 | .type(point) 4 | 5 | |eval() 6 | .lambdas( 7 | lambda: int(str_concat(string(int("val")), string(int("val")))) 8 | ) 9 | .as('concat_string.int') 10 | 11 | 12 | |debug() 13 | 14 | -------------------------------------------------------------------------------- /dfs/other/lambda_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(5s) 3 | .type(point) 4 | 5 | |eval() 6 | .lambdas( 7 | lambda: int(str_concat(string(int("val")),string(int("val")))) 8 | ) 9 | .as('concat_string.int') 10 | 11 | @mirror() 12 | .foo('some string') 13 | %.bar(55.78577) 14 | .baz(1256) 15 | 16 | |debug() 17 | 18 | 19 | %%%%%%%% 20 | def n = 21 | |eval() 22 | .lambdas(lambda: "data.axis.z.tor" - "data.axis.y.tor") 23 | .as('diff_tor') 24 | 25 | |debug() -------------------------------------------------------------------------------- /dfs/other/live_data_test.dfs: -------------------------------------------------------------------------------- 1 | 2 | def topic = 'ttgw/data/live' 3 | def mqtt_host = '10.14.204.3' 4 | def df = '03.002' 5 | 6 | def stream = 7 | |value_emitter() 8 | .type(point) 9 | .every(3s) 10 | 11 | |default() 12 | .fields('id', 'df', 'vs') 13 | .field_values('zu23zui23zi2uz3i', '05.007', 1) 14 | 15 | 16 | stream 17 | 18 | |default() 19 | .fields('topic') 20 | .field_values(topic) 21 | 22 | |win_event() 23 | .every(5) 24 | .period(5) 25 | 26 | |http_post_crate() 27 | .host(<<>>) 28 | .port(4201) 29 | .database('doc') 30 | .table('data_stream_test') 31 | .db_fields('id', 'df', 'vs', 'topic') 32 | .faxe_fields('id', 'df', 'vs', 'topic') 33 | .remaining_fields_as('data_obj') -------------------------------------------------------------------------------- /dfs/other/macro1.dfs: -------------------------------------------------------------------------------- 1 | 
%%%%%%%%%%%%%%%%%%%% MARCOS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | %% macro : flow_id : ctc_module_condition 3 | def val = 1 4 | |case( 5 | lambda: "data.State.Err" == val OR "data.State.Warn" == val, 6 | lambda: "data.State.Auto" == val 7 | ) 8 | .values('Err', 'Ok') 9 | .as('data.condition') 10 | .default('Warn') 11 | 12 | 13 | %% use 14 | |s7read() 15 | .vars('DB12.DBX0.0', 'DB12.DBX0.1', 'DB12.DBX0.2', 'DB12.DBX0.3') 16 | .as('data.State.Err', 'data.State.Warn', 'data.State.Auto', 'data.State.AutoRdy') 17 | 18 | ||ctc_module_condition() 19 | .val(2) 20 | 21 | %% erd88verbindung{-D -------------------------------------------------------------------------------- /dfs/other/mem_test.dfs: -------------------------------------------------------------------------------- 1 | def mem_key = 'mkey' 2 | 3 | |json_emitter() 4 | .every(5s) 5 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "", "predicted_maintenance_time": 1584246411783, 6 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 7 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 8 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 9 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 10 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 11 | 12 | 13 | |eval(lambda: 14 | str_concat( 15 | [ 16 | string("data.condition.id"), 17 | ' is member: ', 18 | string(member("data.condition.id", ls_mem_list(mem_key))) 19 | ] 20 | ) 21 | ) 22 | .as('member') 23 | 24 | |mem() 25 | .key(mem_key) 26 | .field('data.condition.id') 27 | .type('list') 28 | 29 | |debug() -------------------------------------------------------------------------------- /dfs/other/modbus_mqtt_pub.dfs: 
-------------------------------------------------------------------------------- 1 | def whid = 'ab33zr83489urf34r43j' 2 | def device_id = 'DV0025LM33' 3 | def mqtt_broker = '10.14.204.3' 4 | 5 | def energy = 6 | |modbus() 7 | .ip('127.0.0.1') 8 | .port(8899) 9 | .device(255) 10 | .every(1s) 11 | 12 | .function('coils', 'hregs', 'iregs') 13 | .from(2127, 3008, 104) 14 | .count(1, 2, 2) 15 | .as('Energy.ActiveEnergyConsumption', 'Energy.MaximalCurrentValue', 'Energy.BlindEnergyDelivered') 16 | .output('int16', 'float32', 'float32') 17 | .signed(true, true, false) 18 | 19 | %% add some tags 20 | 21 | |default() 22 | .fields('id', 'vs', 'df') 23 | .field_values('cd999999999', 1, '01.010') 24 | 25 | %% publish to mqtt broker 26 | 27 | |mqtt_publish() 28 | .host(mqtt_broker) 29 | .port(1883) 30 | .qos(1) 31 | .topic('ttgw/m1') 32 | .retained() 33 | 34 | -------------------------------------------------------------------------------- /dfs/other/modbus_multi_test.dfs: -------------------------------------------------------------------------------- 1 | 2 | 3 | %|value_emitter() 4 | %.every(5s) 5 | %.type(point) 6 | 7 | |modbus_multi() 8 | .ip('127.0.0.1') 9 | .port(8899) 10 | .device(255) 11 | .every(37s) 12 | .align() 13 | .function('hregs', 'hregs', 'hregs', 'hregs', 'hregs', 'hregs', 'hregs') 14 | .from(2701, 2699, 2709, 2707, 2717, 2715, 3009) 15 | .count(2,2,2,2,2,2,2) 16 | .as( 17 | 'ActiveEnergyRcvd', 18 | 'ActiveEnergyDelvd', 19 | 'ReactiveEnergyRcvd', 20 | 'ReactiveEnergyDelvd', 21 | 'ApparentEnergyRcvd', 22 | 'ApparentEnergyDelvd', 23 | 'MaximalCurrentValue' 24 | ) 25 | .output('float32', 'float32', 'float32', 'float32', 'float32', 'float32', 'float32') 26 | .max_connections(333) 27 | 28 | |debug() 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /dfs/other/mqtt_lambda_topic_test.dfs: -------------------------------------------------------------------------------- 1 | def mqtt_host = 
'devat-cw-mqtt.tgwdev.internal' 2 | def topic_base = 'ttgw/test/' 3 | 4 | |json_emitter() 5 | .every(3s) 6 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "none", "predicted_maintenance_time": 1584246411783, 7 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 8 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad_succer", "predicted_maintenance_time": 1583246411783, 9 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 10 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something_went_really_wrong", "predicted_maintenance_time": 1582246411783, 11 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 12 | .as('data') 13 | %|debug() 14 | 15 | %def ignore = 16 | 17 | |mqtt_publish() 18 | %.host(mqtt_host) 19 | .topic_lambda(lambda: str_concat([ topic_base, str_downcase("data.condition.name"), '/ttw/' , "data.condition_reason"]) ) 20 | .retained() -------------------------------------------------------------------------------- /dfs/other/mqtt_publish.dfs: -------------------------------------------------------------------------------- 1 | |mqtt_subscribe() 2 | .port(2883) 3 | .user('admin') 4 | .pass('admin') 5 | .topic('ttgw/grip_images/rovolutionwels') 6 | .ssl() 7 | 8 | |debug() 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dfs/other/mqtt_subscribe.dfs: -------------------------------------------------------------------------------- 1 | |mqtt_subscribe() 2 | .host('10.10.1.102') 3 | .topics('ttgw/robot_plc/petzoldstr') 4 | .topic_as('flatta.topic') 5 | .include_topic(false) 6 | |debug() -------------------------------------------------------------------------------- /dfs/other/node_not_found_err.dfs: -------------------------------------------------------------------------------- 1 | %% send alarm, if the the mean 
exceeds a certain threshold 2 | 3 | |son_emitter() 4 | .every(12m) 5 | 6 | def mqtt_broker = '10.14.204.3' 7 | def alarm_topic = 'ttopic/alarm/energy_max' 8 | 9 | def point = 10 | |json_emitter() 11 | .every(1s) 12 | .json(<<< {"condition": {"id": 0, "name": "OK"}, "condition_reason": "", "predicted_maintenance_time": 1584246411783, 13 | "vac_on_without_contact": [1.2, 2.5, 4.33], "vac_on_with_contact": [5.6, 45.98, 7.012]} >>>, 14 | <<< {"condition": {"id": 1, "name": "Warning"}, "condition_reason": "bad succer", "predicted_maintenance_time": 1583246411783, 15 | "vac_on_without_contact": [0.2, 2.5, 8.01], "vac_on_with_contact": [6.001, 4.798, 7.012]} >>>, 16 | <<< {"condition": {"id": 2, "name": "Error"}, "condition_reason": "something went really wrong!", "predicted_maintenance_time": 1582246411783, 17 | "vac_on_without_contact": [0.5, 2.5, 0.44], "vac_on_with_contact": [2.06, 4.98, 2.901]} >>>) 18 | 19 | 20 | |debug() 21 | 22 | point 23 | |batch(10).timeout(3s) 24 | 25 | |avg() 26 | .field('val') 27 | .as('avg') 28 | 29 | |debug() 30 | .ls_mem('avg10') 31 | .ls_mem_field('avg') 32 | 33 | point 34 | |where() 35 | .lambda(lambda: "val" > ls_mem('avg10')) 36 | 37 | |default() 38 | .fields('message') 39 | .field_values('val is > avg10') 40 | 41 | |debug() -------------------------------------------------------------------------------- /dfs/other/option_check.dfs: -------------------------------------------------------------------------------- 1 | def test = 1 2 | 3 | def flow = 4 | |value_emitter() 5 | .every(3s) 6 | .type(point) 7 | 8 | |eval() 9 | .lambdas(lambda: 2 + 5) 10 | .as('seven') 11 | 12 | |shift() 13 | .offset(6s) 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /dfs/other/oracle_query_test.dfs: -------------------------------------------------------------------------------- 1 | % [{host, "db-arche.vm.net"}, {port, 1521}, {user, "MMX_TS_TRACKING_V4"}, 2 | %% {password, "MMX_TS_TRACKING_V4"}, 
{service_name, "ARCHE.vm.net"}]. 3 | 4 | 5 | def host = 'db-arche.vm.net' 6 | def port = 1521 7 | def user = 'MMX_TS_TRACKING_V4' 8 | def password = 'MMX_TS_TRACKING_V4' 9 | def service_name = 'ARCHE.vm.net' 10 | %def query = <<< 11 | % select connection, sent, received from tr_keepalive 12 | %>>> 13 | def query = <<< 14 | select * from tr_keepalive 15 | >>> 16 | 17 | def s = 18 | |oracle_query() 19 | .host(host) 20 | .port(port) 21 | .user(user) 22 | .pass(password) 23 | .service_name(service_name) 24 | .query(query) 25 | .every(10s) 26 | 27 | |default() 28 | .fields('id', 'topic', 'df', 'vs') 29 | .field_values('aghfdhg5dahfahdf5dadhaf', service_name, '06.002', 1) 30 | 31 | |debug() 32 | 33 | 34 | |http_post_crate() 35 | .host('10.14.204.8') 36 | .port(4201) 37 | .database('doc') 38 | .table('oracle_test') 39 | .db_fields('id', 'df', 'vs', 'topic') 40 | .faxe_fields('id', 'df', 'vs', 'topic') 41 | .remaining_fields_as('data_obj') -------------------------------------------------------------------------------- /dfs/other/param_list_test.dfs: -------------------------------------------------------------------------------- 1 | 2 | |value_emitter() 3 | .every(3s) 4 | .type(point) 5 | .align() 6 | 7 | |default() 8 | .fields('v1.val') 9 | .field_values(0.0) 10 | 11 | -------------------------------------------------------------------------------- /dfs/other/pg_query.dfs: -------------------------------------------------------------------------------- 1 | def host = '127.0.0.1' 2 | def port = 5432 3 | def sql = <<< SELECT AVG(data_obj['x']['cur']) as avg_x_cur FROM table WHERE id < 33 >>> 4 | def user = 'crate' 5 | def database = 'doc' 6 | 7 | |postgresql_query() 8 | .host(host) 9 | .port(port) 10 | .database(database) 11 | .user(user) 12 | 13 | .query(sql) 14 | .group_by_time(3m) 15 | .every(5s) 16 | .period(18m) 17 | .align() -------------------------------------------------------------------------------- /dfs/other/python1.dfs: 
-------------------------------------------------------------------------------- 1 | 2 | def s = 3 | |value_emitter() 4 | .every(3s) 5 | .type(point) 6 | 7 | |default() 8 | .fields('id', 'df', 'vs') 9 | .field_values('oi23u4oi23u4oi32u34oi2u3', '02.005', 1) 10 | 11 | 12 | @callback() 13 | %.foo('sefooo') 14 | %.bar(12.5) 15 | %.baz(55) 16 | 17 | |debug() -------------------------------------------------------------------------------- /dfs/other/python2.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(200ms) 3 | ._name('emitter') 4 | 5 | |default() 6 | .fields('id', 'df', 'vs') 7 | .field_values('oi23u4oi23u4oi32u34oi2u3', '02.005', 1) 8 | 9 | @double() 10 | .field('val') 11 | .as('double_val') 12 | ._name('python_double') 13 | 14 | |batch(5) 15 | 16 | @callback() 17 | ._name('python_passthrough') 18 | 19 | |debug() 20 | 21 | -------------------------------------------------------------------------------- /dfs/other/python3.dfs: -------------------------------------------------------------------------------- 1 | 2 | def s = 3 | |value_emitter() 4 | .every(2s) 5 | .type(point) 6 | 7 | @double() 8 | .field('val') 9 | .as('double_val') 10 | 11 | |debug() -------------------------------------------------------------------------------- /dfs/other/python4.dfs: -------------------------------------------------------------------------------- 1 | 2 | def s = 3 | |value_emitter() 4 | .every(3s) 5 | .type('point') 6 | 7 | |default() 8 | .fields('id', 'df', 'vs') 9 | .field_values('oi23u4oi23u4oi32u34oi2u3', '02.005', 1) 10 | 11 | @callback() 12 | .stop_on_exit(true) 13 | .as('data.data.data') 14 | 15 | |debug() -------------------------------------------------------------------------------- /dfs/other/rename.dfs: -------------------------------------------------------------------------------- 1 | def whid = 'ab33zr83489urf34r43j' 2 | def device_id = 'DV0025LM33' 3 | 4 | def energy = 5 | |modbus() 6 | 
.ip('127.0.0.1') 7 | .port(8899) 8 | .device(255) 9 | .every(5s) 10 | .function('hregs', 'hregs') 11 | .from(2127, 255) 12 | .count(2, 1) 13 | .as('Energy.Max.A', 'somevalue') 14 | .output('real', '') 15 | .signed(true, false) 16 | 17 | %% add some tags 18 | |default() 19 | .tags('name', 'warehouse_id', 'device_id') 20 | .tag_values('persistent_publisher', whid, device_id) 21 | 22 | %% rename some fields 23 | |rename() 24 | .fields('Energy.Max.A') 25 | .as_fields('EMaxA') 26 | .tags('warehouse_id') 27 | .as_tags('whid') 28 | -------------------------------------------------------------------------------- /dfs/other/rename2.dfs: -------------------------------------------------------------------------------- 1 | def vals = 2 | |json_emitter() 3 | .every(3s) 4 | .json(<<< 5 | { 6 | "object" : 7 | {"Payload": 8 | { "Occupancies": [3] } 9 | } 10 | } 11 | >>>) 12 | 13 | |debug() 14 | 15 | |rename() 16 | .fields('Payload.Occupancies[1]') 17 | .as_fields('Payload.Occupancies') 18 | 19 | |debug() -------------------------------------------------------------------------------- /dfs/other/s7_read_test1.dfs: -------------------------------------------------------------------------------- 1 | def plc_host = '192.168.121.201' 2 | def rack = 0 3 | def slot = 1 4 | def interval = 200ms 5 | def db_number = '11101' 6 | def db = 'DB{{db_number}}.DB' 7 | def pre = 'data.' 
8 | %% cpu s71517f 9 | def s7_1 = 10 | |s7read() 11 | .ip(plc_host) 12 | .rack(rack) 13 | .slot(slot) 14 | .every(interval).align() 15 | .diff() 16 | .vars( 17 | '{{db}}X30.0', '{{db}}X30.1', 18 | '{{db}}X30.2', '{{db}}X30.3', 19 | '{{db}}X30.4', '{{db}}X30.5', 20 | '{{db}}X30.6', '{{db}}X30.7', 21 | '{{db}}X31.0', '{{db}}X31.1', 22 | 23 | 24 | 25 | 26 | '{{db}}W8', '{{db}}W10', 27 | '{{db}}D62', '{{db}}D66', 28 | '{{db}}D70', '{{db}}D74' 29 | ) 30 | .as( 31 | '1', '2', 32 | '3', '4', 33 | '5', '6', 34 | '7', '8', 35 | '9', '10', 36 | 37 | 38 | '11', '12', 39 | '13', '14', 40 | '15', '16' 41 | ) 42 | 43 | |debug('info') 44 | 45 | -------------------------------------------------------------------------------- /dfs/other/s7_test.dfs: -------------------------------------------------------------------------------- 1 | def whid = 'ab33zr83489urf34r43j' 2 | def device_id = 'DV0025LM33' 3 | def mqtt_host = '10.14.204.3' 4 | 5 | def s7 = 6 | |s7read() 7 | .ip('127.0.0.1') 8 | .rack(0) 9 | .slot(1) 10 | .every(100ms) 11 | %.diff() 12 | .vars('DB4,R1', 'DB4,DWORD7', 'DB4,DInt22', 'DB4,Int42', 'DB4,Int106') 13 | .as('StrapDrv.ActMotorTorque', 'StGa.ActVal', 'LiftDrv.TargetPos', 'LiftDrv.SetpVelo', 'LiftDrv.ActVelo') 14 | 15 | %% overwrite the above values for testing purposes 16 | |eval() 17 | .lambdas(lambda: random_real(4), lambda: random_real(44), 18 | lambda: random(30), lambda: random(30), lambda: random(30) * -1) 19 | .as('StrapDrv.ActMotorTorque', 'StGa.ActVal', 'LiftDrv.TargetPos', 'LiftDrv.SetpVelo', 'LiftDrv.ActVelo') 20 | 21 | 22 | %% add some tags 23 | |default() 24 | .tags('name', 'warehouse_id', 'device_id') 25 | .tag_values('measurement1', whid, device_id) 26 | 27 | |win_event() 28 | .every(10) 29 | .period(10) 30 | 31 | %% publish to mqtt broker 32 | 33 | |mqtt_publish() 34 | .host(mqtt_host) 35 | .port(1883) 36 | .qos(1) 37 | .topic('ttgw/m1') 38 | .retained() 39 | 40 | 41 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 42 | 43 | 44 | %% 
exmaples from former PoC 45 | %.vars('DB4902,DInt1222', 'DB4902,DInt1226', 'DB4902,DWORD2636', 'DB4905,Int1172', 'DB4902,Int2616') 46 | %.as('TrvDrv.OpHrRet', 'TrvDrv.OpHr', 'TrvDrv.ErrorCode', 'LiftDrv.SetpVelo', 'TrvDrv.ActVelo') 47 | 48 | -------------------------------------------------------------------------------- /dfs/other/sample_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(50ms) 3 | .type(point) 4 | |debug('info') 5 | |sample(500ms) 6 | |debug() -------------------------------------------------------------------------------- /dfs/other/set_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(2s) 3 | .type(point) 4 | 5 | |set() 6 | .fields('set_val', 'data.val').field_values('hallo string!', 'hallo data string!') 7 | .tags('set_tag', 'data.tag').tag_values('blue', 'data_red') 8 | 9 | |debug() -------------------------------------------------------------------------------- /dfs/other/shift._dfs: -------------------------------------------------------------------------------- 1 | def charge = 'a7fwewh7734-1E' 2 | def avg = 33.565 3 | def threshold = 30 4 | 5 | def s = 6 | |value_emitter() 7 | .every(5s) 8 | .type(point) 9 | 10 | def t = 11 | |value_emitter() 12 | .every(10s) 13 | .type(point) 14 | 15 | s 16 | |combine(t) 17 | .fields('val') 18 | .prefix('combined') 19 | |default() 20 | .tag('charge') 21 | .tag_value(charge) 22 | .field('avg') 23 | .field_value(avg) 24 | 25 | |shift(-6h) 26 | 27 | |delete() 28 | .fields('val', 'gal') 29 | 30 | %% add a new field named 'val.combined' with the 31 | %% value which is the higher one of the two fields 32 | %% 'combined.val' | 'avg' 33 | |eval(lambda: max("combined.val", "avg")) 34 | .as('val.combined') 35 | 36 | |eval(lambda: str_reverse(charge)) 37 | .as('charge_reversed') 38 | 39 | %% add a new tag named "second" to the datapoint 40 | %% the value of the field will 
be 'true' if the second portion of 41 | %% the timestamp is greater than a ceatain value 42 | %% otherwise value for tag "second" will be 'false' 43 | |eval(lambda: if((second("ts") > threshold), 'true', 'false')) 44 | .as('second') 45 | .tags('second') 46 | 47 | %% set a new tag with name 'second_on_five' to 'true', if 48 | %% the last digit of the second portion of the timestamp 49 | %% equals '5', 50 | %% otherwise 'second_on_five' will be set to 'false' 51 | |eval( 52 | lambda: string(str_ends_with(string("ts"), '5')), 53 | lambda: to_date_string("ts") 54 | ) 55 | .as('second_string_endwith_5', 'intermediate.hour') 56 | .tags('second_on_five') 57 | |avg() 58 | .field('val.combined') 59 | .as('val.combined.avg') 60 | 61 | -------------------------------------------------------------------------------- /dfs/other/shift.dfs: -------------------------------------------------------------------------------- 1 | 2 | def s = 3 | |value_emitter() 4 | .every(1s) 5 | .type(point) 6 | 7 | |default() 8 | .fields('id', 'df', 'vs') 9 | .field_values('oi23u4oi23u4oi32u34oi2u3', '02.005', 1) 10 | 11 | 12 | |win_event() 13 | .every(7) 14 | .period(7) 15 | 16 | @mirror() 17 | .foo('sefooo') 18 | .bar(12.5) 19 | .baz(55) -------------------------------------------------------------------------------- /dfs/other/shift2.dfs: -------------------------------------------------------------------------------- 1 | def vals = 2 | |value_emitter() 3 | .every(2s) 4 | .type(point) 5 | .format(ejson) 6 | 7 | |shift(-3m) 8 | 9 | |debug() -------------------------------------------------------------------------------- /dfs/other/state_change_bulk_test.dfs: -------------------------------------------------------------------------------- 1 | %% if val1 2 | 3 | def pre = 'obj' 4 | 5 | def state = 6 | |json_emitter() 7 | .every(5s).align() 8 | .json( 9 | <<< {"err1": 0, "err2":0, "err3":0, "nr" : 233, "errcode1": 0, "errcode2": 0, "errcode3": 0, "sub": {"val1": 22}} >>>, 10 | <<< {"err1": 1, 
"err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 11 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 12 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 13 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 14 | <<< {"err1": 1, "err2":1, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 15 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 16 | <<< {"err1": 0, "err2":1, "err3":1, "nr" : 233, "errcode1": 0, "errcode2": 0, "errcode3": 333} >>>, 17 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 18 | <<< {"err1": 0, "err2":1, "err3":0, "nr" : 233, "errcode1": 0, "errcode2": 222, "errcode3": 0} >>>, 19 | <<< {"err1": 0, "err2":0, "err3":1, "nr" : 233, "errcode1": 0, "errcode2": 0, "errcode3": 333} >>> 20 | ) 21 | |rename() 22 | .fields('data') 23 | .as_fields('data.obj') 24 | % |debug('info') 25 | 26 | |state_change_bulk() 27 | %.lambda_pattern(<<<$field == 1>>>) 28 | .state_value(1) 29 | .field('data.obj') 30 | .exclude_fields('data.obj.nr') 31 | .enter() 32 | .enter_keep('data.obj.nr') 33 | .leave() 34 | .leave_keep('data.obj.errcode1', 'data.object.errcode3') 35 | 36 | |eval(lambda: "data.obj.errcode1" * 13).as('13nr') 37 | 38 | %|debug('warning') -------------------------------------------------------------------------------- /dfs/other/state_count_test.dfs: -------------------------------------------------------------------------------- 1 | def in = 2 | |value_emitter() 3 | .every(2s) 4 | .type(point) 5 | 6 | |state_count() 7 | .lambda(lambda: "val" < 7) 8 | .as('val_below_7') 9 | 10 | |debug() 11 | -------------------------------------------------------------------------------- /dfs/other/state_duration_test.dfs: 
-------------------------------------------------------------------------------- 1 | %% if val1 2 | 3 | def state = 4 | |json_emitter() 5 | .every(1s).align() 6 | .json( 7 | <<< {"err1": 0, "err2":0, "err3":0, "nr" : 233, "errcode1": 0, "errcode2": 0, "errcode3": 0} >>>, 8 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 9 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 10 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 11 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 12 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 13 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 14 | <<< {"err1": 0, "err2":0, "err3":1, "nr" : 233, "errcode1": 0, "errcode2": 0, "errcode3": 333} >>>, 15 | <<< {"err1": 1, "err2":0, "err3":0, "nr" : 233, "errcode1": 111, "errcode2": 0, "errcode3": 0} >>>, 16 | <<< {"err1": 0, "err2":1, "err3":0, "nr" : 233, "errcode1": 0, "errcode2": 222, "errcode3": 0} >>>, 17 | <<< {"err1": 0, "err2":0, "err3":1, "nr" : 233, "errcode1": 0, "errcode2": 0, "errcode3": 333} >>> 18 | ) 19 | 20 | |debug('info') 21 | 22 | |state_change() 23 | .lambda(lambda: "data.err1" == 1) 24 | .leave() 25 | .leave_keep('data.nr', 'data.errcode1') 26 | 27 | |debug() 28 | 29 | -------------------------------------------------------------------------------- /dfs/other/state_duration_test_new.dfs: -------------------------------------------------------------------------------- 1 | def in = 2 | |value_emitter() 3 | .every(2s) 4 | .type(point) 5 | 6 | def new = 7 | in 8 | |state_duration() 9 | .lambda(lambda: "val" < 7) 10 | .as('val_below_7') 11 | |debug('info') 12 | 13 | def old = 14 | in 15 | |state_duration() 16 | .lambda(lambda: "val" < 7) 17 | 
.as('val_below_7') 18 | |debug() 19 | -------------------------------------------------------------------------------- /dfs/other/state_seq_test.dfs: -------------------------------------------------------------------------------- 1 | def timeout = 0s 2 | def condition_reason = 'timeout' 3 | 4 | def in1 = 5 | |json_emitter() 6 | .every(12s) 7 | .jitter(2s) 8 | .json(<<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>) 9 | 10 | 11 | def in2 = 12 | |json_emitter() 13 | .every(15s) 14 | .jitter(32s) 15 | .json(<<<{"Function": "interface::WInterface::progress", "topic": "in2"}>>>) 16 | 17 | def in3 = 18 | |json_emitter() 19 | .every(4s) 20 | .jitter(13s) 21 | .json(<<<{"Function": "interface::WInterface::process", "topic": "in3"}>>>) 22 | 23 | 24 | in1 25 | |state_sequence(in2, in3) 26 | .states( 27 | lambda: "data.topic" == 'in1', %% state 1 28 | lambda: "data.topic" == 'in2', %% state 2 29 | lambda: "data.topic" == 'in3' %% state 3 30 | ) 31 | .within( 32 | 25s, %% time for state 2 to be fulfilled 33 | 20s %% time for state 3 to be fulfilled 34 | ) 35 | %.strict() 36 | 37 | |debug('warning') 38 | 39 | 40 | -------------------------------------------------------------------------------- /dfs/other/statistics.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(200ms) 3 | .type(point) 4 | 5 | |batch(10) 6 | 7 | |statistics() 8 | .modules('avg', 'min', 'max') 9 | .field('val') 10 | 11 | |debug() -------------------------------------------------------------------------------- /dfs/other/stats_test.dfs: -------------------------------------------------------------------------------- 1 | def go = 2 | |value_emitter() 3 | .every(1s) 4 | .type(point) 5 | 6 | |win_time() 7 | .every(7s) 8 | .period(7s) 9 | 10 | |kurtosis() 11 | .field('val') 12 | 13 | |debug() 14 | 15 | def path_perc = 16 | |value_emitter() 17 | .every(1s) 18 | .type(point) 19 | .format(ejson) 20 | 21 | | win_time() 22 | .every(5s) 23 | 
.period(5s) 24 | 25 | |range() 26 | .field('val.val') 27 | .as('val.val.range') 28 | 29 | |debug() -------------------------------------------------------------------------------- /dfs/other/stats_test_fail.dfs: -------------------------------------------------------------------------------- 1 | 2 | def path_perc = 3 | |value_emitter() 4 | .every(1s) 5 | .type(point) 6 | .format(ejson) 7 | 8 | | win_time() 9 | .every(5s) 10 | .period(5s) 11 | 12 | |range() 13 | .field('val') 14 | .as('val.val.range') 15 | 16 | |debug() -------------------------------------------------------------------------------- /dfs/other/tcp_conveyor_track.dfs: -------------------------------------------------------------------------------- 1 | def parser = parser_conv_tracking_v1 2 | def v1 = 3 | |tcp_recv_line() 4 | .ip('127.0.0.1') 5 | .port(1111) 6 | .parser(parser) 7 | .min_length(61) 8 | 9 | |win_event() 10 | .period(5s) 11 | 12 | |mqtt_publish() 13 | .host('10.14.204.3') 14 | .port(1883) 15 | .qos(1) 16 | .topic('t_tgw/m1') 17 | .retained() 18 | 19 | 20 | -------------------------------------------------------------------------------- /dfs/other/tcp_line.dfs: -------------------------------------------------------------------------------- 1 | %def ip = '10.200.190.123' 2 | %def port = 2008 3 | def parser = parser_lrep_v1 4 | def ip = '192.168.121.201' 5 | def port = 2000 6 | 7 | |tcp_recv_line() 8 | .ip(ip) 9 | .port(port) 10 | .parser(parser) 11 | .min_length(40) 12 | .changed() 13 | 14 | |set() 15 | .fields('df', 'id').field_values('11.001', 'KDR_LREP') 16 | 17 | %% write to crate 18 | 19 | |batch(5) 20 | .timeout(10s) 21 | 22 | |http_post_crate() 23 | .table('teckdr') 24 | .db_fields('id', 'df', 'data_obj') 25 | .faxe_fields('id', 'df', 'data') 26 | 27 | %% publish mqtt 28 | |mqtt_publish() 29 | .topic('ttgw/data/kdr_lrep') 30 | 31 | 32 | -------------------------------------------------------------------------------- /dfs/other/tcp_line_robot_plc.dfs: 
-------------------------------------------------------------------------------- 1 | def parser = parser_robot_plc_v1 2 | def v1 = 3 | |tcp_recv_line() 4 | .ip('127.0.0.1') 5 | .port(2222) 6 | %.parser(parser) 7 | .min_length(5) 8 | 9 | 10 | -------------------------------------------------------------------------------- /dfs/other/tcp_robot_plc.dfs: -------------------------------------------------------------------------------- 1 | def parser = parser_robot_plc_v1 2 | def v1 = 3 | |tcp_recv() 4 | .ip('127.0.0.1') 5 | .port(1122) 6 | .parser(parser) 7 | 8 | 9 | -------------------------------------------------------------------------------- /dfs/other/tcp_window.dfs: -------------------------------------------------------------------------------- 1 | def v1 = 2 | |tcp_recv_line() 3 | .ip('127.0.0.1') 4 | .port(1111) 5 | 6 | def window = 7 | v1 8 | |win_time() 9 | .period(12s) 10 | .every(12s) 11 | 12 | window 13 | |variance() 14 | .perc(85) 15 | .field('val') 16 | .as('variance') 17 | window 18 | |sum() 19 | .as('sum') 20 | .field('val') 21 | 22 | def median = 23 | window 24 | |median() 25 | .as('median') 26 | .field('val') 27 | 28 | median 29 | |first() 30 | .as('first') 31 | .field('median') 32 | 33 | -------------------------------------------------------------------------------- /dfs/other/template_test.dfs: -------------------------------------------------------------------------------- 1 | def emit_every = 2s 2 | def emit_every_jitter = 1s 3 | def debug_type = 'info' 4 | def rewrite_fun = lambda: str_concat("data.condition.state", '_post') 5 | 6 | |json_emitter() 7 | .every(emit_every) 8 | .jitter(emit_every_jitter) 9 | .json( 10 | <<<{"condition": {"state": "IDLE"}, "topic": "in2"} >>>, 11 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 12 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 13 | <<<{"condition": {"state": "IDLE"}, "topic": "in2"} >>>, 14 | <<<{"condition": {"state": "IDLE"}, "topic": "in3"} >>>, 15 | <<<{"condition": 
{"state": "IDLE"}, "topic": "in1"} >>>, 16 | <<<{"condition": {"state": "IDLER"}, "topic": "in1"} >>>, 17 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>, 18 | <<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>> 19 | ) 20 | 21 | |debug(debug_type) 22 | |change_detect('data.condition.state', 'data.topic') 23 | |eval(rewrite_fun).as('data.condition.endstate') 24 | |debug() -------------------------------------------------------------------------------- /dfs/other/test_lm_conveyor_tracking.dfs: -------------------------------------------------------------------------------- 1 | %% testing conveyor tracking tcp and route through mqtt-broker and rabbitmq 2 | def parser = parser_conv_tracking_v1 3 | def v1 = 4 | |tcp_recv_line() 5 | .ip('127.0.0.1') 6 | .port(1111) 7 | .parser(parser) 8 | .min_length(61) 9 | .extract() 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /dfs/other/test_lm_robot_plc.dfs: -------------------------------------------------------------------------------- 1 | %% testing robot plc tcp and route through mqtt-broker and rabbitmq 2 | def parser = parser_robot_plc_v1 3 | def v1 = 4 | |tcp_recv() 5 | .ip('127.0.0.1') 6 | .port(1122) 7 | .parser(parser) 8 | .as('map_data') 9 | .extract() 10 | 11 | %% publish to mqtt broker 12 | %% |mqtt_publish_single() 13 | %% .host('10.14.204.3') 14 | %% .port(1883) 15 | %% .qos(1) 16 | %% .topic('ttgw/robot_plc') 17 | %% .retained() 18 | 19 | 20 | -------------------------------------------------------------------------------- /dfs/other/timeout_test1.dfs: -------------------------------------------------------------------------------- 1 | def timeout = 20s 2 | def condition_reason = 'timeout' 3 | 4 | def in1 = 5 | |json_emitter() 6 | .every(12s) 7 | .jitter(2s) 8 | .json(<<<{"condition": {"state": "IDLE"}, "topic": "in1"} >>>) 9 | 10 | 11 | def in2 = 12 | |json_emitter() 13 | .every(15s) 14 | .jitter(32s) 15 | .json(<<<{"Function": 
"interface::WInterface::progress", "topic": "in2"}>>>) 16 | 17 | 18 | in1 19 | |triggered_timeout(in2) 20 | .timeout(timeout) 21 | .timeout_trigger(lambda: "data.topic" == 'in1') 22 | 23 | |debug('warning') 24 | 25 | 26 | -------------------------------------------------------------------------------- /dfs/other/union_test.dfs: -------------------------------------------------------------------------------- 1 | def in1 = 2 | |value_emitter() 3 | .every(500ms) 4 | .type(point) 5 | .fields(val) 6 | 7 | def in2 = 8 | |value_emitter() 9 | .every(4s) 10 | .type(point) 11 | .fields('val2', 'val3') 12 | 13 | 14 | in1 15 | |union(in2) 16 | 17 | |debug() -------------------------------------------------------------------------------- /dfs/other/value_diff_test.dfs: -------------------------------------------------------------------------------- 1 | def in = 2 | |value_emitter() 3 | .every(3s) 4 | .type(point) 5 | .fields('value', 'value2') 6 | |debug('info') 7 | |value_diff() 8 | .fields('value', 'randval', 'irgendwashere') 9 | .as('value_diff', 'randval_diff', 'wurschtmir') 10 | .default(0) 11 | |debug() -------------------------------------------------------------------------------- /dfs/other/where.dfs: -------------------------------------------------------------------------------- 1 | def in = 2 | |value_emitter() 3 | .every(300ms) 4 | .type(point) 5 | 6 | |where() 7 | .lambda(lambda: "val" > 3) 8 | 9 | |eval() 10 | .lambdas(lambda: round("val" * 1000), lambda: round("val")*2) 11 | .as('val_gramm_rounded', 'double') 12 | 13 | %|where() 14 | %.lambda(lambda: if(member("double", ls_mem('topics_seen')), false, true)) 15 | %.ls_mem('topics_seen') 16 | %.ls_mem_field('double') 17 | %.ls_mem_ttl(3) 18 | 19 | |debug() 20 | 21 | %|delete() 22 | %.fields('val') 23 | 24 | %|debug() 25 | 26 | %@mirror() 27 | 28 | % |log() 29 | %.file('lambda_log.txt') 30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- 
/dfs/other/win_clock_test.dfs: -------------------------------------------------------------------------------- 1 | def in = 2 | |value_emitter() 3 | .every(1s) 4 | .type(point) 5 | %.align() 6 | 7 | 8 | in 9 | |change_detect('val') 10 | 11 | in 12 | |win_clock() 13 | .every(5s) 14 | .period(15s) 15 | .fill_period() 16 | .align() -------------------------------------------------------------------------------- /dfs/other/win_event_test.dfs: -------------------------------------------------------------------------------- 1 | var modbus = 2 | |modbus('hregs') 3 | .ip('127.0.0.1') 4 | .port(8899) 5 | .device(255) 6 | .every(1s) 7 | .from(2127) 8 | %.count(5) 9 | .as('A') 10 | %'B', 11 | % 'Energy.BlindEnergyDelivered', 'Energy.Max.A', 'Energy.Min.A') 12 | 13 | modbus 14 | |win_event_q() 15 | .every(7) 16 | .period(5) 17 | .fill_period() -------------------------------------------------------------------------------- /dfs/other/win_event_test2.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(2s) 3 | .type(point) 4 | 5 | |win_event() 6 | .every(5) 7 | 8 | |debug('warning') -------------------------------------------------------------------------------- /dfs/other/win_time_test.dfs: -------------------------------------------------------------------------------- 1 | var modbus = 2 | |modbus('hregs') 3 | .ip('127.0.0.1') 4 | .port(8899) 5 | .device(255) 6 | .every(1s) 7 | .from(2127) 8 | %.count(5) 9 | .as('A') 10 | %'B', 11 | % 'Energy.BlindEnergyDelivered', 'Energy.Max.A', 'Energy.Min.A') 12 | 13 | modbus 14 | |win_time_q() 15 | .every(5s) 16 | .period(15s) 17 | .fill_period() -------------------------------------------------------------------------------- /dfs/other/win_time_test_aligned.dfs: -------------------------------------------------------------------------------- 1 | def in = 2 | |json_emitter('{"hello": "there"}') 3 | .every(1s) 4 | .jitter(300ms) 5 | %.type(point) 6 | %.align() 7 | 8 | in 9 
| |win_clock() 10 | .every(5s) 11 | %.align() 12 | %.period(10s) 13 | %.fill_period() 14 | 15 | 16 | |aggregate() 17 | .fields('hello') 18 | .functions('count') 19 | 20 | |eval(lambda: to_iso8601("ts"), lambda: to_iso8601(now())).as('dt', 'now') 21 | 22 | |debug() -------------------------------------------------------------------------------- /dfs/other/window.dfs: -------------------------------------------------------------------------------- 1 | def v1 = 2 | |value_emitter() 3 | .every(1s) 4 | .type(point) 5 | 6 | def window = 7 | v1 8 | |win_time() 9 | .every(5s) 10 | 11 | 12 | window 13 | |aggregate() 14 | .fields('val', 'val', 'val', 'val') 15 | .functions('variance', 'sum', 'avg', 'count_distinct') 16 | .as('variance', 'sum', 'avg', 'count_distinct') 17 | 18 | |debug() 19 | 20 | 21 | -------------------------------------------------------------------------------- /dfs/test/count_change.dfs: -------------------------------------------------------------------------------- 1 | def data = 2 | |json_emitter( 3 | '{"data": {"hello": "Francis"}}', 4 | '{"data": {"hello": "Roberto"}}', 5 | '{"data": {"hello": "Roberto"}}', 6 | '{"data": {"hello": "Carol"}}', 7 | '{"data": {"hello": "Jim"}}', 8 | '{"data": {"hello": "Francis"}}' 9 | 10 | ) 11 | .every(3s) 12 | .select('batch') 13 | 14 | 15 | data 16 | |aggregate() 17 | .fields('data.hello') 18 | .as('changed_agg') 19 | .functions('count_change') 20 | .keep_tail(false) 21 | |debug('info') 22 | 23 | data 24 | |count_change() 25 | .field('data.hello') 26 | .as('changed') 27 | .keep_last(false) 28 | 29 | |debug() -------------------------------------------------------------------------------- /dfs/test/delete.dfs: -------------------------------------------------------------------------------- 1 | |json_emitter() 2 | .every(3s) 3 | .json( 4 | '{"sometimes": "", "live": "is", "hard": "no", "val": 4}', 5 | '{"sometimes": "", "live": "is", "hard": "", "val": 0}', 6 | '{"sometimes": "he", "live": "", "hard": "no", "val": 
0}' 7 | ) 8 | 9 | |delete('data.sometimes', 'data.live', 'data.hard') 10 | %.when_value('') 11 | %.where(lambda: val > 0) 12 | 13 | |debug() -------------------------------------------------------------------------------- /dfs/test/env.dfs: -------------------------------------------------------------------------------- 1 | def debug_on = e: env('FAXE_DEBUG_HANDLER_MQTT_ENABLE', false) == 'on' 2 | def json = '{"FAXE_DEBUG_HANDLER_MQTT_ENABLE": {{debug_on}}, "fetch": "FAXE_DEBUG_HANDLER_MQTT_BASE_TOPIC"}' 3 | def dest_ssl_var = '-' 4 | def var_default = '-' 5 | def dest_ssl = e: if(dest_ssl_var == var_default, (env('FAXE_CRATE_HTTP_TLS_ENABLE', false) == 'on'), bool(dest_ssl_var)) 6 | 7 | def medium_buffer = 33 8 | 9 | |json_emitter(json) 10 | 11 | |eval(lambda: env("fetch", 'na')) 12 | .as('FAXE_DEBUG_HANDLER_MQTT_BASE_TOPIC') 13 | 14 | 15 | |eval( 16 | lambda: if("data.TargetBufferCountMin" <= 1, "data.TargetBufferCountMin", 17 | if("data.TargetBufferCountMin" >= medium_buffer, "data.TargetBufferCountMax", 18 | if("data.TargetBufferCountMax" < medium_buffer, "data.TargetBufferCountMin", "data.TargetBufferCountMax") 19 | ) 20 | ) 21 | ) 22 | .as('data.TargetBufferAggregate') 23 | 24 | 25 | |debug() -------------------------------------------------------------------------------- /dfs/test/faxe_http.dfs: -------------------------------------------------------------------------------- 1 | def path2 = '/faxe_stats' 2 | 3 | %% first set up a listen node to receive data via http 4 | |http_listen() 5 | .path(path2) 6 | .port(8899) 7 | .payload_type('json') 8 | .as('recv') 9 | 10 | |debug() 11 | 12 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 13 | def path = '/v1/stats/faxe' 14 | def host = 'localhost' 15 | def port = 8081 16 | 17 | %% call faxe's own rest api, and get some stats fields 18 | |http_get() 19 | .host(host) 20 | .port(port) 21 | .path(path) 22 | .every(3s) 23 | 24 | |debug('info') 25 | .message('got from {{path}}') 26 | 27 | %% post this data to the http_listen node 
setup at the beginning of the script 28 | |http_post() 29 | .host(host) 30 | .port(8899) 31 | .path(path2) 32 | .method('put') 33 | 34 | -------------------------------------------------------------------------------- /dfs/test/group_by.dfs: -------------------------------------------------------------------------------- 1 | def group_field = 'data.code' 2 | def data = 3 | |json_emitter() 4 | .every(100ms) 5 | .align(true) 6 | .json( 7 | <<<{"code" : 224, "message": "this is a test", "mode": 1}>>>, 8 | <<<{"code" : 334, "message": "this is another test", "mode": 1}>>>, 9 | <<<{"code" : 114, "message": "this is another test", "mode": 2}>>>, 10 | <<<{"code" : 443, "message": "this is another test", "mode": 1}>>>, 11 | <<<{"code" : 224, "message": "this is another test", "mode": 2}>>>, 12 | <<<{"code" : 111, "message": "this is another test", "mode": 1}>>>, 13 | <<<{"code" : 551, "message": "this is another test", "mode": 2}>>> 14 | ) 15 | .as('data') 16 | 17 | %% %% %% %% %% %% group 18 | def group_by = 19 | data 20 | |group_by(group_field) 21 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% |> start groups 22 | 23 | def window = 24 | group_by 25 | |eval( 26 | lambda: string("data.code"), 27 | lambda: str_replace("data.message", 'test', string("data.code")) 28 | ) 29 | .as( 30 | 'data.group', 31 | 'data.message' 32 | ) 33 | |value_diff() 34 | .fields('data.mode') 35 | .as('data.mode_diff') 36 | .mode('p-c') 37 | |win_clock() 38 | .every(4s) 39 | .align() 40 | 41 | def agg1 = 42 | window 43 | |aggregate() 44 | .fields('data.code') 45 | .functions('sum') 46 | .as('data.code_sum') 47 | %.keep('data.group') 48 | 49 | def agg2 = 50 | window 51 | |aggregate() 52 | .fields('data.mode') 53 | .functions('count_change') 54 | .as('data.mode_change') 55 | %.keep('data') 56 | 57 | agg1 58 | |join(agg2) 59 | .merge_field('data') 60 | .tolerance(0s) 61 | .missing_timeout(5ms) 62 | .fill(0.00000) 63 | 64 | |group_union() 65 | 66 | 
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% |> end groups 67 | 68 | |debug('notice') -------------------------------------------------------------------------------- /dfs/test/group_by2.dfs: -------------------------------------------------------------------------------- 1 | def group_field = 'data.code' 2 | def data = 3 | |json_emitter() 4 | .every(300ms) 5 | .json( 6 | <<<{"code" : 224, "message": "my code is: test", "mode": 1}>>>, 7 | <<<{"code" : 334, "message": "my code is: test", "mode": 1}>>>, 8 | <<<{"code" : 114, "message": "my code is: test", "mode": 2}>>>, 9 | <<<{"code" : 443, "message": "my code is: test", "mode": 1}>>>, 10 | <<<{"code" : 224, "message": "my code is: test", "mode": 2}>>>, 11 | <<<{"code" : 111, "message": "my code is: test", "mode": 1}>>>, 12 | <<<{"code" : 551, "message": "my code is: test", "mode": 2}>>> 13 | ) 14 | .as('data') 15 | 16 | %% %% %% %% %% %% group 17 | def group_by = 18 | data 19 | |group_by(group_field) 20 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% |> start groups 21 | 22 | def window = 23 | group_by 24 | |eval( 25 | lambda: string("data.code"), 26 | lambda: str_replace("data.message", 'test', string("data.code")) 27 | ) 28 | .as( 29 | 'data.group', 30 | 'data.message' 31 | ) 32 | |value_diff() 33 | .fields('data.mode') 34 | .as('data.mode_diff') 35 | .mode('p-c') 36 | |win_clock() 37 | .every(4s) 38 | .fill_period() 39 | 40 | def agg1 = 41 | window 42 | |aggregate() 43 | .fields('data.code') 44 | .functions('sum') 45 | .as('data.code_sum') 46 | %.keep('data.group') 47 | 48 | def agg2 = 49 | window 50 | |aggregate() 51 | .fields('data.mode') 52 | .functions('count_change') 53 | .as('data.mode_change') 54 | %.keep('data') 55 | 56 | agg1 57 | |group_union(agg2) 58 | 59 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% |> end groups 60 | 61 | |debug('notice') -------------------------------------------------------------------------------- /dfs/test/http_get.dfs: 
-------------------------------------------------------------------------------- 1 | |http_get() 2 | .host('127.0.0.1') 3 | .port(8081) 4 | .path('/v1/stats/faxe') 5 | .every(4s) 6 | .align() 7 | .as('data') 8 | %.tls() 9 | 10 | |eval(lambda: round( (now() / 1000) )) 11 | .as('data.timestamp_sec') 12 | 13 | |mqtt_publish() 14 | .topic('tgw/data/test/miae/faxe_stats') 15 | -------------------------------------------------------------------------------- /dfs/test/http_listen.dfs: -------------------------------------------------------------------------------- 1 | |http_listen() 2 | .payload_type('json') 3 | .user('user') 4 | .pass('pass') 5 | 6 | |debug() 7 | 8 | |http_listen() 9 | .port(8897) 10 | .path('/StandardInterface/MaintenanceAlertStandardInterface_SaveMaintenanceAlertStandardInterface') 11 | .content_type('application/x-www-form-urlencoded') 12 | .as('data.http_res') 13 | .payload_type('json') 14 | .tls() 15 | 16 | |http_listen() 17 | .port(8898) 18 | .content_type('text/plain') 19 | .as('data.http_res') 20 | .payload_type('json') -------------------------------------------------------------------------------- /dfs/test/if.dfs: -------------------------------------------------------------------------------- 1 | %|json_emitter() 2 | %.json( 3 | % '{"address": "cond_vac", "value": 99}', 4 | % '{"address": "cond_scale", "value": 94}', 5 | % '{"address": "cond_robot", "value": 95}' 6 | % ) 7 | %.as('data') 8 | % 9 | %|if(lambda: "data.address" != 'cond_vac') 10 | %.then('address is not cond_vac') 11 | %.else(lambda: str_concat('address is cond_vac and value is ', string("data.value"))) 12 | %.as('data.desc') 13 | % 14 | %|debug() 15 | 16 | 17 | 18 | |json_emitter() 19 | .json( 20 | '{"address": "", "value": 99}', 21 | '{"address": "cond_scale", "value": 94}', 22 | '{"address": "cond_robot", "value": 95}' 23 | ) 24 | .as('data') 25 | 26 | |if(lambda: "data.address" == '') 27 | .then('address is empty') 28 | %.else(lambda: str_concat(['address is ', 
"data.address", ' and value is ', string("data.value")])) 29 | .as('data.note') 30 | 31 | |debug() -------------------------------------------------------------------------------- /dfs/test/mongo.dfs: -------------------------------------------------------------------------------- 1 | |mongo_query() 2 | .host('localhost') 3 | .user('root') 4 | .pass('root') 5 | .database('test') 6 | .collection('inventory') 7 | .query(<<< {"item": "canvas"} >>>) 8 | %.query(<<< {"size.h": {"$gt": 16}} >>>) 9 | .every(5s) 10 | .as('data') 11 | 12 | 13 | |aggregate() 14 | .fields('data.size.h', 'data.size.w', 'data.size.h') 15 | .functions('avg', 'avg', 'sum') 16 | .as('canvas.size_h_avg', 'canvas.size_w_avg', 'canvas.size_h_sum') 17 | 18 | |debug() 19 | 20 | 21 | -------------------------------------------------------------------------------- /dfs/test/mqtt_topic_field_test.dfs: -------------------------------------------------------------------------------- 1 | def default_map = 2 | '{"1":"tgw/data/some/test/number1", "2":"tgw/data/some/test/number2", "3":"tgw/data/some/test/number3"}' 3 | def payload = 4 | |json_emitter() 5 | .every(100ms) 6 | .json( 7 | '{"no": 1, "val": "10"}', 8 | '{"no": 2, "val": "20"}', 9 | '{"no": 3, "val": "30"}', 10 | '{"no": 4, "val": "40"}' 11 | ) 12 | .as('data') 13 | .select('seq') 14 | 15 | |eval( 16 | lambda: map_get(string("data.no"), default_map), 17 | lambda: if(undefined("topic"), 'tgw/data/some/test/default', "topic") 18 | ) 19 | .as( 20 | 'topic', 21 | 'topic' 22 | ) 23 | 24 | 25 | |mqtt_publish() 26 | .topic_field('topic') 27 | .use_pool(true) 28 | -------------------------------------------------------------------------------- /dfs/test/rebatch_test.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .mode('monotonic_int') 3 | .every(300ms) 4 | 5 | |batch(16) 6 | |batch(3) 7 | |batch(8) 8 | 9 | %|debug() -------------------------------------------------------------------------------- 
/dfs/test/rename.dfs: -------------------------------------------------------------------------------- 1 | |json_emitter( 2 | %'{}' 3 | '{"field1" : 1, "field2": "2", "field3": {"deeper": {"list": [1,2,{"inlist": 999}]}}}', 4 | '{"field11" : 1, "field22": "2", "field3": {"deeper": {"list": [3,4,{"inlist": 444}]}}}' 5 | ) 6 | .every(500ms) 7 | .as('data.prata') 8 | 9 | |rename() 10 | .fields('data.prata.field1') 11 | .as_fields('prata.data.field1') 12 | 13 | |debug() -------------------------------------------------------------------------------- /dfs/test/time_align.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(17s) 3 | 4 | |eval( 5 | lambda: to_iso8601("ts"), 6 | lambda: to_iso8601(time_align("ts", 1m)) 7 | ) 8 | .as('datetime', 'datetime_aligned') 9 | 10 | |debug() -------------------------------------------------------------------------------- /dfs/test/value_emitter.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(3s) 3 | .mode('monotonic_int') 4 | .type(point) 5 | %.fields('val1', 'val2') 6 | 7 | |default() 8 | .fields('some') 9 | .field_values('value') 10 | |delete().fields('wasined') 11 | 12 | |debug() -------------------------------------------------------------------------------- /dfs/test/win_session.dfs: -------------------------------------------------------------------------------- 1 | |value_emitter() 2 | .every(500ms) 3 | .jitter(4600ms) 4 | 5 | |win_session() 6 | .session_timeout(4500ms) 7 | 8 | |debug('info') -------------------------------------------------------------------------------- /docker.run: -------------------------------------------------------------------------------- 1 | # build 2 | docker build -t faxe-docker . 3 | docker build -t faxe -f Dockerfile_azedge . 
4 | 5 | # run 6 | docker run -d -p 8081:8081 --ulimit nofile=1024 --log-driver=syslog faxe-docker 7 | docker run -a stdout -a stderr -p 8081:8081 -it faxe-docker 8 | docker run -a stdout -a stderr -p 8081:8081 -it --log-driver=syslog faxe-docker 9 | 10 | # final 11 | docker run -d -p 8081:8081 --log-driver=syslog faxe-docker 12 | 13 | docker run --rm -it -p 8081:8081 -e MNESIA_DIR=/data -v /tmp/mnesia/:/data faxe:latest 14 | ## fix hostname, because docker uses a new one on every start and this will lead to mnesia not finding its data 15 | docker run --hostname=faxe --rm -it -p 8081:8081 -e MNESIA_DIR=/data -v /home/heyoka/data:/data faxe:latest 16 | 17 | 18 | ## docker debian prod with env vars 19 | docker run --hostname=faxe --rm -it -p 8081:8081 -e MNESIA_DIR=/data -e FAXE_ALLOW_ANONYMOUS=true -v /home/heyoka/data:/data faxe:latest -------------------------------------------------------------------------------- /ext.config: -------------------------------------------------------------------------------- 1 | []. 
-------------------------------------------------------------------------------- /python/callback.py: -------------------------------------------------------------------------------- 1 | from faxe import Faxe 2 | 3 | 4 | class Callback(Faxe): 5 | """ 6 | simple noop faxe python node callback 7 | """ 8 | 9 | def handle_point(self, point_data): 10 | self.emit(point_data) 11 | 12 | def handle_batch(self, batch_data): 13 | self.emit(batch_data) 14 | -------------------------------------------------------------------------------- /python/data_demo.py: -------------------------------------------------------------------------------- 1 | from faxe import Faxe, Point, Batch 2 | 3 | 4 | class Data_demo(Faxe): 5 | """ 6 | simple noop faxe python node callback 7 | """ 8 | 9 | def handle_point(self, point_data): 10 | 11 | # just add a field and emit the data-point 12 | self.log("point field ", Point.value(point_data, 'pyth.field3.deeper.list[0]')) 13 | Point.value(point_data,'glippse.globbsy.p_ts[0]', Faxe.now()) 14 | self.log('result after setting data', point_data) 15 | 16 | self.emit(point_data) 17 | 18 | def handle_batch(self, batch_data): 19 | # HELPER CLASSES DEMO ########## 20 | """ 21 | ############################################################################################ 22 | completely ignoring the input data_batch, we build our own and emit it ;) 23 | 24 | here we can see, that the helper classes are completely optional to use 25 | we can always directly work on the datastructure 26 | 27 | the helper classes work directly on the data structure, and they do not hold an internal representation of the 28 | data structure themselves 29 | 30 | :param batch_data: 31 | :return: void 32 | """ 33 | newpoint = Point.new(1) 34 | Point.value(newpoint, 'python_field', 1234) 35 | 36 | newpoint2 = Point.new(2) 37 | Point.field(newpoint2, 'python_field2', 2345) 38 | 39 | newpoint3 = Point.new(Faxe.now()) 40 | point3fields = \ 41 | {'python_field3': 5678, 'python_field3.1': 
12.546, 'python_field3.2': {'sub': ['alle', 'sind', 'da%$<-']}} 42 | Point.fields(newpoint3, point3fields) 43 | 44 | newpoint4 = dict() 45 | newpoint4['fields'] = {'python_field3': 6464} 46 | newpoint4['ts'] = Faxe.now()+10 47 | 48 | lastpoint = Point.new(7) 49 | Point.field(lastpoint, 'python_field_7', 8773) 50 | 51 | # create batch and add all the points to it 52 | batch_out = Batch.new(9876) 53 | Batch.points(batch_out, [newpoint2, lastpoint, newpoint3, newpoint, newpoint4]) 54 | 55 | self.log('output ' + str(batch_out), 'info') 56 | self.emit(batch_out) 57 | -------------------------------------------------------------------------------- /python/double.py: -------------------------------------------------------------------------------- 1 | from faxe import Faxe, Point, Batch 2 | 3 | 4 | class Double(Faxe): 5 | 6 | @staticmethod 7 | def options(): 8 | opts = [ 9 | ("field", "string"), 10 | ("as", "string") 11 | ] 12 | return opts 13 | 14 | def init(self, args=None): 15 | self.fieldname = args["field"] 16 | self.asfieldname = args["as"] 17 | 18 | def handle_point(self, point_data): 19 | self.emit(self.calc(point_data)) 20 | 21 | def handle_batch(self, batch_data): 22 | for point in Batch.points(batch_data): 23 | self.calc(point) 24 | self.emit(batch_data) 25 | 26 | def calc(self, point_dict): 27 | Point.value(point_dict, self.asfieldname, (Point.value(point_dict, self.fieldname) * 2)) 28 | return point_dict 29 | 30 | -------------------------------------------------------------------------------- /python/filter.py: -------------------------------------------------------------------------------- 1 | from faxe import Faxe 2 | 3 | 4 | class Filter(Faxe): 5 | """ 6 | simple batch filter python node 7 | """ 8 | 9 | def handle_point(self, point_data): 10 | self.emit(point_data) 11 | 12 | def handle_batch(self, batch_data): 13 | print(batch_data) 14 | for i, point in enumerate(batch_data['points']): 15 | if (not point['fields']['field1']) or (point['fields']['field1'] > 
20): 16 | del batch_data['points'][i] 17 | 18 | self.emit(batch_data) 19 | -------------------------------------------------------------------------------- /python/less_than.py: -------------------------------------------------------------------------------- 1 | from faxe import Faxe 2 | from faxe import Point 3 | from faxe import Batch 4 | 5 | 6 | class Less_than(Faxe): 7 | """ 8 | simple less-than batch filter node using the 'Batch' and 'Point' helper classes 9 | 10 | Filters out points, that have fields (.fields() option) with values below given thresholds (.values() option) 11 | Only numerical values are considered 12 | 13 | use in dfs: 14 | 15 | @less_than() 16 | .fields( 'field1', 'field2', 'field3') 17 | .values( 14, 3.048, 7) 18 | 19 | """ 20 | 21 | @staticmethod 22 | def options(): 23 | opts = [ 24 | ('fields', 'string_list'), 25 | ('values', 'number_list') 26 | ] 27 | return opts 28 | 29 | def init(self, args=None): 30 | self.fields = list(args['fields']) 31 | self.values = list(args['values']) 32 | 33 | def handle_point(self, point_data): 34 | self.emit(point_data) 35 | 36 | def handle_batch(self, batch_data): 37 | """ 38 | uses the built-in 'filter' function to filter the list of points, utilizing the 'filter_fun' method below 39 | :param batch_data: 40 | :return: 41 | """ 42 | # filter points from the data-batch 43 | filtered_points = filter(self.filter_fun, Batch.points(batch_data)) 44 | # set the filtered list of points back to the batch 45 | Batch.points(batch_data, list(filtered_points)) 46 | # emit the filtered batch 47 | self.emit(batch_data) 48 | 49 | def filter_fun(self, point): 50 | """ 51 | for a single DataPoint iterate over the fields given with options 52 | and compare values to thresholds 53 | :param point: 54 | :return: 55 | """ 56 | for idx, fieldname in enumerate(self.fields): 57 | val = Point.field(point, fieldname) 58 | if val is not None: 59 | if (type(val) == int or type(val) == float) and val >= self.values[idx]: 60 | return False 
61 | 62 | return True 63 | 64 | -------------------------------------------------------------------------------- /python/path.py: -------------------------------------------------------------------------------- 1 | from faxe import Faxe, Point, Batch 2 | 3 | 4 | class Path(Faxe): 5 | 6 | @staticmethod 7 | def options(): 8 | opts = [ 9 | ("field", "string") 10 | ] 11 | return opts 12 | 13 | def init(self, args=None): 14 | self.path = args["field"] 15 | 16 | def handle_point(self, point_data): 17 | # val = Point.value(point_data, self.path) 18 | # print(val) 19 | # p = Point.new(Faxe.now()) 20 | # Point.value(p, 'python.value', val) 21 | Point.default(point_data, 'pyth.field2', 'did not have field2') 22 | Point.default(point_data, 'pyth.field22', 'did not have field22') 23 | self.emit(point_data) 24 | 25 | 26 | -------------------------------------------------------------------------------- /python/python_time.py: -------------------------------------------------------------------------------- 1 | from faxe import Faxe, Point, Batch 2 | import sys 3 | 4 | 5 | class Python_time(Faxe): 6 | 7 | # def handle_batch(self, b): 8 | # self.log('handle_batch is implemented ;)') 9 | 10 | def handle_point(self, point_data): 11 | print(sys.version) 12 | 13 | # add the field python.time with the current timestamp 14 | now = Faxe.now() 15 | Point.value(point_data, 'python.time', now) 16 | batch = Batch.new(now) 17 | batch['points'] = [point_data] 18 | self.emit(batch) 19 | 20 | -------------------------------------------------------------------------------- /python_requirements.txt: -------------------------------------------------------------------------------- 1 | # pip install --no-cache-dir -r python_requirements.txt 2 | ###### Requirements without Version Specifiers ###### 3 | psutil 4 | cytoolz 5 | jsonpath-ng 6 | numpy 7 | # pandas 8 | scipy 9 | # paho-mqtt 10 | # requests 11 | # ujson 12 | # opencv-python-headless 13 | # 14 | ###### Requirements with Version Specifiers 
###### 15 | # See https://www.python.org/dev/peps/pep-0440/#version-specifiers 16 | # docopt == 0.6.1 # Version Matching. Must be version 0.6.1 17 | # keyring >= 4.1.1 # Minimum version 4.1.1 18 | # coverage != 3.5 # Version Exclusion. Anything except version 3.5 19 | # Mopidy-Dirble ~= 1.1 # Compatible release. Same as >= 1.1, == 1.* 20 | # 21 | ###### Refer to other requirements files ###### 22 | # -r other-requirements.txt 23 | -------------------------------------------------------------------------------- /python_requirements_prod.txt: -------------------------------------------------------------------------------- 1 | # pip install --no-cache-dir -r python_requirements.txt 2 | ###### Requirements without Version Specifiers ###### 3 | psutil 4 | cytoolz 5 | jsonpath-ng >= 1.5.2 6 | numpy 7 | pandas 8 | scipy 9 | paho-mqtt 10 | requests 11 | ujson 12 | opencv-python-headless 13 | azure-storage-blob 14 | datetime 15 | typing 16 | dataclasses 17 | #matplotlib >=3.9 18 | #plotly >=5.22 19 | # 20 | ###### Requirements with Version Specifiers ###### 21 | # See https://www.python.org/dev/peps/pep-0440/#version-specifiers 22 | # docopt == 0.6.1 # Version Matching. Must be version 0.6.1 23 | # keyring >= 4.1.1 # Minimum version 4.1.1 24 | # coverage != 3.5 # Version Exclusion. Anything except version 3.5 25 | # Mopidy-Dirble ~= 1.1 # Compatible release. 
Same as >= 1.1, == 1.* 26 | # 27 | ###### Refer to other requirements files ###### 28 | # -r other-requirements.txt 29 | -------------------------------------------------------------------------------- /rebar.config.script: -------------------------------------------------------------------------------- 1 | case os:getenv("FAXE_EXTENSIONS") of 2 | false -> CONFIG; % env var not defined 3 | [] -> CONFIG; % env var set to empty string 4 | Path -> 5 | io:format("extension path: ~p~n", [Path]), 6 | {ok, [AddDeps]} = file:consult(Path), 7 | NewDeps = proplists:get_value(deps, CONFIG) ++ AddDeps, 8 | NewCONFIG = lists:keystore(deps, 1, CONFIG, {deps, NewDeps}), 9 | %% next get the additional app names 10 | AddApps = proplists:get_keys(AddDeps), 11 | %% relx section 12 | [{release, Vsn, Apps} | RRest] = Relx = proplists:get_value(relx, NewCONFIG), 13 | NewRelx = [{release, Vsn, Apps++AddApps} | RRest], 14 | lists:keystore(relx, 1, NewCONFIG, {relx, NewRelx}) 15 | end. -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "ignorePaths": ["Dockerfile_azedge", "Dockerfile.intel-nuc", "Dockerfile.revpi-core-3"], 4 | "reviewers": [ "required:miae@tgw-group.com" ], 5 | "customManagers": [ 6 | { 7 | "description": "Match versions in rebar.config", 8 | "customType": "regex", 9 | "fileMatch": [ 10 | "rebar.config" 11 | ], 12 | "datasourceTemplate": "hex", 13 | "matchStrings": [ 14 | "{(?[^,]+), \"(?\\d+\\.\\d+(\\.\\d+)?)\"" 15 | ], 16 | "versioningTemplate": "semver" 17 | }, 18 | { 19 | "description": "Match versions (per datasource and depName) in Dockerfile", 20 | "customType": "regex", 21 | "fileMatch": [ 22 | "^(Dockerfile.*)$" 23 | ], 24 | "matchStrings": [ 25 | "#\\s*renovate:\\s*(datasource=(?.*?) 
)?depName=(?.*?)( versioning=(?.*?))?\\s*.*?_VERSION=(?.*)" 26 | ], 27 | "versioningTemplate": "loose" 28 | } 29 | ], 30 | "packageRules": [ 31 | { 32 | "matchUpdateTypes": ["major"], 33 | "enabled": false 34 | }, 35 | { 36 | "matchManagers": ["custom.regex"], 37 | "matchPackageNames": ["erlang/otp"], 38 | "extractVersion": "^OTP-(?.*)$" 39 | } 40 | ] 41 | } -------------------------------------------------------------------------------- /todo: -------------------------------------------------------------------------------- 1 | 2 | 3 | --------------------------------------------------------------------------------