├── scripts
│   ├── query
│   ├── csv_to_json.py
│   ├── generate_lld.py
│   ├── query_to_yaml.py
│   ├── csv_delete_columns.py
│   ├── values_to_macros.py
│   ├── yaml_to_query.py
│   ├── pc_tests_results_clean.csv
│   ├── pc_tests_results.csv
│   └── transform_perfcheck.py
├── ansible
│   ├── playbooks
│   │   ├── grafana_deployment.yaml
│   │   ├── k8s_perfchecks_cronjob.yaml
│   │   ├── k8s_perfchecks.yaml
│   │   ├── zbx_templates.yaml
│   │   ├── grafana_datasource.yaml
│   │   ├── grafana_dashboards.yaml
│   │   ├── zbx_autoregistration.yaml
│   │   ├── zbx_deployment.yaml
│   │   ├── zbx_trigger_action.yaml
│   │   ├── deploying_agents.yaml
│   │   ├── zbx_media.yaml
│   │   ├── zbx_users.yaml
│   │   └── zbx_hosts.yaml
│   ├── files
│   │   ├── eida_nodes.yaml
│   │   └── oculus_users.yaml
│   ├── config
│   │   ├── config_grafana.yaml
│   │   ├── config_zabbix.yaml
│   │   ├── config_prod.yaml
│   │   └── config_staging.yaml
│   └── collections
│       └── requirements.yaml
├── eida_consistency
│   ├── Dockerfile
│   └── automate_eida_consistency.py
├── monitoring.md
├── .gitignore
├── .sops.yaml
├── grafana_server
│   ├── helm_values
│   │   ├── values.yaml
│   │   └── production.yaml
│   └── dashboards
│       ├── welcome.json
│       ├── EIDA_perfcheck_inter-nodes.json
│       └── EIDA_nodes_requirements.json
├── perf_checks
│   ├── Dockerfile
│   └── webscenarios_perfcheck.py
├── .github
│   ├── ISSUE_TEMPLATE
│   │   └── new_monitoring.md
│   └── workflows
│       └── docker-publish.yml
├── CONTRIBUTING.md
├── zabbix_server
│   ├── templates
│   │   ├── zbx_export_templates_EC.yaml
│   │   ├── zbx_export_templates_discovery.yaml
│   │   └── zbx_export_templates_routing.yaml
│   └── helm_values
│       ├── values.yaml
│       └── production.yaml
├── triggers.md
├── eida_nodes
│   ├── ign.yaml
│   ├── ingv.yaml
│   ├── icgc.yaml
│   ├── lmu.yaml
│   ├── koeri.yaml
│   ├── bgr.yaml
│   ├── bgs.yaml
│   ├── noa.yaml
│   ├── eposfr.yaml
│   ├── odc.yaml
│   ├── eth.yaml
│   ├── uib-norsar.yaml
│   ├── geofon.yaml
│   └── niep.yaml
└── README.md

/scripts/query:
--------------------------------------------------------------------------------
1 | 
2 | 
--------------------------------------------------------------------------------
/ansible/playbooks/grafana_deployment.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Add datasource
3 |   import_playbook: grafana_datasource.yaml
4 | 
5 | - name: Import dashboards
6 |   import_playbook: grafana_dashboards.yaml
7 | 
--------------------------------------------------------------------------------
/ansible/files/eida_nodes.yaml:
--------------------------------------------------------------------------------
1 | eida_nodes:
2 |   - BGR
3 |   - BGS
4 |   - EPOSFR
5 |   - GEOFON
6 |   - ICGC
7 |   - IGN
8 |   - INGV
9 |   - KOERI
10 |   - LMU
11 |   - NIEP
12 |   - NOA
13 |   - ODC
14 |   - ETH
15 |   - UIB-NORSAR
16 | 
--------------------------------------------------------------------------------
/eida_consistency/Dockerfile:
--------------------------------------------------------------------------------
1 | #############################################
2 | FROM ghcr.io/eida/eida-consistency:latest
3 | 
4 | WORKDIR /app
5 | ADD automate_eida_consistency.py .
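6 | # Run the consistency check at container start rather than at build time
7 | # (assumes the base image ships a Python interpreter).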
8 | CMD ["python", "automate_eida_consistency.py"]
9 | 
--------------------------------------------------------------------------------
/ansible/config/config_grafana.yaml:
--------------------------------------------------------------------------------
1 | - name: Credentials Grafana API
2 |   ansible.builtin.set_fact:
3 |     grafana_url: "http://localhost:3000"
4 |     grafana_user: ansible
5 |     grafana_password: "{{ ansible_httpapi_pass }}"
6 | 
--------------------------------------------------------------------------------
/monitoring.md:
--------------------------------------------------------------------------------
1 | # List of monitored services
2 | 
3 | - Availability : `15m`
4 | - Dataselect : `15m`
5 | - Present in central EIDA routing : `60m`
6 | - Routing information published at node : `15m`
7 | - Station : `15m`
8 | - WFCatalog : `15m`
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode/
2 | .ansible/
3 | ansible/files/macros/
4 | perf_checks/performance_results/
5 | perf_checks/oculus-monitoring-backend
6 | perf_checks/webscenarios_perfcheck.log
7 | eida_consistency/automate_eida_consistency.log
8 | eida_consistency/reports/*
--------------------------------------------------------------------------------
/.sops.yaml:
--------------------------------------------------------------------------------
1 | creation_rules:
2 |   - path_regex: \.yaml$
3 |     pgp: "C83ECA1E0B9D719C7FA2470F18E4F5A58801E669"
4 |     encrypted_regex: "(password|ansible_httpapi_pass|ansible_zabbix_token|ansible_grafana_token|zabbix_grafana_token|zabbix_grafana_pass|zabbix_user_pass|adminPassword)"
5 |     mac_only_encrypted: true
6 | 
--------------------------------------------------------------------------------
/ansible/config/config_zabbix.yaml:
--------------------------------------------------------------------------------
1 | - name: Credentials Zabbix API
2 |   ansible.builtin.set_fact:
3 |     ansible_user: ansible
4 |     ansible_httpapi_pass: "{{ ansible_httpapi_pass }}"
5 | 
6 | - name: Set API token
7 |   ansible.builtin.set_fact:
8 |     ansible_zabbix_token: "{{ ansible_zabbix_token }}"
9 | 
--------------------------------------------------------------------------------
/ansible/collections/requirements.yaml:
--------------------------------------------------------------------------------
1 | # ---
2 | # collections:
3 | #   - name: community.zabbix
4 | #     version: 3.3.0
5 | #   - name: ansible.posix
6 | #     version: 1.3.0
7 | #   - name: community.general
8 | #     version: 3.7.0
9 | #   - name: kubernetes.core
10 | #     version: "5.3.0"
11 | #   - name: community.kubernetes
12 | #     version: "2.0.1"
13 | 
--------------------------------------------------------------------------------
/grafana_server/helm_values/values.yaml:
--------------------------------------------------------------------------------
1 | adminUser: admin
2 | ingress:
3 |   enabled: true
4 |   hosts:
5 |     - grafana.local
6 | grafana.ini:
7 |   auth.anonymous:
8 |     enabled: true
9 |     org_name: Main Org.
10 |     org_role: Viewer
11 |   plugins:
12 |     enabled: true
13 | plugins:
14 |   - grafana-piechart-panel
15 |   - grafana-worldmap-panel
16 |   - alexanderzobnin-zabbix-app
17 | 
--------------------------------------------------------------------------------
/grafana_server/helm_values/production.yaml:
--------------------------------------------------------------------------------
1 | adminUser: admin
2 | ingress:
3 |   enabled: true
4 |   hosts:
5 |     - eida-oculus.orfeus-eu.org
6 | grafana.ini:
7 |   auth.anonymous:
8 |     enabled: true
9 |     org_name: Main Org.
10 |     org_role: Viewer
11 |   plugins:
12 |     enabled: true
13 |   # server:
14 |   #   root_url: eida-oculus.orfeus-eu.org
15 | plugins:
16 |   - grafana-piechart-panel
17 |   - grafana-worldmap-panel
18 |   - alexanderzobnin-zabbix-app
19 | 
--------------------------------------------------------------------------------
/perf_checks/Dockerfile:
--------------------------------------------------------------------------------
1 | #############################################
2 | # Install uv
3 | FROM python:3.12-slim
4 | 
5 | RUN apt-get update \
6 |     && apt-get install -y --no-install-recommends git \
7 |     && rm -rf /var/lib/apt/lists/* /usr/share/doc /usr/share/man \
8 |     && apt-get clean
9 | 
10 | COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
11 | # Change the working directory to the `app` directory
12 | WORKDIR /app
13 | COPY webscenarios_perfcheck.py .
14 | RUN uv sync --script webscenarios_perfcheck.py
15 | CMD ["uv", "run", "--script", "webscenarios_perfcheck.py"]
16 | 
--------------------------------------------------------------------------------
/ansible/playbooks/k8s_perfchecks_cronjob.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: batch/v1
2 | kind: CronJob
3 | metadata:
4 |   name: oculus-perfchecks
5 |   namespace: eida-monitoring
6 | spec:
7 |   suspend: false
8 |   schedule: "0 0,12 * * *" # every day at 00:00 and 12:00
9 |   jobTemplate:
10 |     spec:
11 |       template:
12 |         spec:
13 |           restartPolicy: OnFailure
14 |           containers:
15 |             - name: oculus-perfchecks
16 |               image: ghcr.io/eida/oculus-perfchecks:main
17 |               imagePullPolicy: Always
18 |               env:
19 |                 - name: ZABBIX_SERVER
20 |                   value: oculus-zabbix-zabbix-server
21 | 
--------------------------------------------------------------------------------
/ansible/playbooks/k8s_perfchecks.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Deploy perfchecks Python scripts on k8s cluster
3 |   kubernetes.core.k8s:
4 |     state: present
5 |     namespace: eida-monitoring
6 |     definition:
7 |       apiVersion: batch/v1
8 |       kind: CronJob
9 |       metadata:
10 |         name: oculus-perfchecks
11 |       spec:
12 |         suspend: false
13 |         schedule: "0 13 * * *" # every day at 1 pm
14 |         jobTemplate:
15 |           spec:
16 |             template:
17 |               spec:
18 |                 restartPolicy: OnFailure
19 |                 containers:
20 |                   - name: oculus-perfchecks
21 |                     image: ghcr.io/eida/oculus-perfchecks
22 |                     imagePullPolicy: Always
23 | 
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/new_monitoring.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: New monitoring
3 | about: Ask for something new to monitor
4 | title: "[NewMonitoring] Title of your issue"
5 | labels: 'new monitoring'
6 | assignees: jschaeff
7 | 
8 | ---
9 | 
10 | Note that new monitoring requests will be considered by their importance for EIDA and the cost of implementation.
11 | The product owner will evaluate the request with you.
12 | 
13 | ## User story
14 | A clear and concise description of the context of the request.
15 | 
16 | As a <persona>, I want to be notified when <something happens>, in order to <goal>.
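17 | 
18 | *Example (illustrative): as a node operator, I want to be notified when the dataselect service at my node stops responding, in order to fix it before users notice.*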
19 | 
20 | ## Details of the monitoring
21 | 
22 | - What should be tested?
23 | - What is the expected output of the test?
24 | - What is the definition of a problem?
25 | - What is the definition of the resolution of the problem?
26 | 
27 | Add any other context.
28 | 
--------------------------------------------------------------------------------
/ansible/playbooks/zbx_templates.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Install Zabbix plugins (EIDA)
3 |   hosts: localhost
4 |   gather_facts: false
5 |   vars_files:
6 |     - "../config/config_{{ hostvars['localhost']['selected_environment'] | default('staging') }}.yaml"
7 |   vars:
8 |     ansible_network_os: community.zabbix.zabbix
9 |     ansible_connection: httpapi
10 |     ansible_httpapi_port: 8888
11 |     ansible_httpapi_use_ssl: false
12 |     ansible_httpapi_validate_certs: false
13 |     ansible_zabbix_url_path: ""
14 |     ansible_host: localhost
15 | 
16 |   tasks:
17 |     - name: Import Zabbix Credentials
18 |       ansible.builtin.import_tasks: ../config/config_zabbix.yaml
19 | 
20 |     - name: Import Zabbix templates
21 |       delegate_to: localhost
22 |       community.zabbix.zabbix_template:
23 |         template_yaml: "{{ lookup('file', '../../zabbix_server/templates/zbx_export_templates.yaml') }}"
24 |         state: present
25 | 
--------------------------------------------------------------------------------
/ansible/playbooks/grafana_datasource.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Configure datasource in Grafana (Zabbix)
3 |   hosts: localhost
4 |   gather_facts: false
5 |   vars_files:
6 |     - ../config/config_prod.yaml
7 |     - ../config/config_grafana.yaml
8 |   vars:
9 |     ansible_network_os: community.grafana.grafana
10 | 
11 |   tasks:
12 |     - name: Import Grafana Credentials
13 |       ansible.builtin.import_tasks: ../config/config_grafana.yaml
14 | 
15 |     - name: Configure datasource (Zabbix)
16 |       community.grafana.grafana_datasource:
17 |         grafana_url: "http://localhost:3000"
18 |         grafana_user: ansible
19 |         grafana_password: "{{ ansible_httpapi_pass }}"
20 |         name: oculus-zabbix-datasource
21 |         ds_type: "alexanderzobnin-zabbix-datasource"
22 |         ds_url: "http://oculus-zabbix-zabbix-web:8888/api_jsonrpc.php"
23 |         is_default: true
24 |         zabbix_user: grafana
25 |         zabbix_password: "{{ zabbix_grafana_pass }}"
26 |         trends: true
27 |         state: present
28 | 
--------------------------------------------------------------------------------
/ansible/playbooks/grafana_dashboards.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Import dashboard Grafana
3 |   hosts: localhost
4 |   gather_facts: false
5 |   vars_files:
6 |     - ../config/config_prod.yaml
7 |     - ../config/config_grafana.yaml
8 |   vars:
9 |     ansible_network_os: community.grafana.grafana
10 | 
11 |   tasks:
12 |     - name: Import Grafana Credentials
13 |       ansible.builtin.import_tasks: ../config/config_grafana.yaml
14 | 
15 |     - name: Import dashboard
16 |       community.grafana.grafana_dashboard:
17 |         grafana_url: "http://localhost:3000"
18 |         grafana_user: ansible
19 |         grafana_password: "{{ ansible_httpapi_pass }}"
20 |         overwrite: false
21 |         validate_certs: false
22 |         use_proxy: false
23 |         commit_message: "Import EIDA dashboards"
24 |         path: "{{ dashboards_files }}"
25 |         state: present
26 |       loop: "{{ lookup('fileglob', '../../grafana_server/dashboards/*.json', wantlist=True) }}"
27 |       loop_control:
28 |         loop_var: dashboards_files
29 | 
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing changes to the EIDA nodes monitoring values
2 | 
3 | To modify the parameters of the checks made by Oculus, you must:
4 | 1. create a new branch
5 | 2. modify the .yaml file of your node (found [here](eida_nodes/)); you can modify everything in the GitHub web interface
6 | 3. create a new [Pull Request](https://github.com/EIDA/oculus-monitoring-backend/pulls)
7 | 
8 | The `OnlineCheck` section is used to perform the main Zabbix tests, while `perfCheck` is for performance tests; do not change this structure.
9 | 
10 | 
11 | ## OnlineCheck
12 | The same parameter values are used for all the webservices (see the example below):
13 | - net: the network
14 | - sta: the station
15 | - loc: the location
16 | - cha: the channel
17 | - start: the start date
18 | - end: the end date
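19 | 
20 | For example, the `onlineCheck` section of [ign.yaml](eida_nodes/ign.yaml) looks like this:
21 | 
22 | ```yaml
23 | onlineCheck:
24 |   net: ES
25 |   sta: EXVI3
26 |   loc: --
27 |   cha: HNZ
28 |   start: "2025-06-25T23:00:00"
29 |   end: "2025-06-25T23:00:10"
30 | ```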
31 | 
32 | ## PerfCheck
33 | Changes require validation by the administrator.
34 | Performance checks are organised by service, and each service has a list of scenarios with their parameters.
35 | 
36 | ## Contact for alerts
37 | Update the email contacts [here](ansible/files/oculus_users.yaml)
--------------------------------------------------------------------------------
/zabbix_server/templates/zbx_export_templates_EC.yaml:
--------------------------------------------------------------------------------
1 | zabbix_export:
2 |   version: '7.0'
3 |   template_groups:
4 |     - uuid: 44950db14d8549c2a022ab3d5ba3ff96
5 |       name: Templates/EIDA
6 |   templates:
7 |     - uuid: a2974168aa6d40d0ba1bd70dbae9f629
8 |       template: 'Template eida consistency'
9 |       name: 'Template eida consistency'
10 |       groups:
11 |         - name: Templates/EIDA
12 |       items:
13 |         - uuid: c3c7b983bee243daa52b97ae753f4dbe
14 |           name: 'Report json'
15 |           type: TRAP
16 |           key: report.json
17 |           delay: '0'
18 |           value_type: TEXT
19 |           trends: '0'
20 |           description: 'entire JSON report of the EIDA consistency test'
21 |           tags:
22 |             - tag: format
23 |               value: json
24 |         - uuid: 233416c295964e57a6b2a9fa7fedde54
25 |           name: 'Score global'
26 |           type: TRAP
27 |           key: score.eida_consistency
28 |           delay: '0'
29 |           value_type: TEXT
30 |           trends: '0'
31 |           tags:
32 |             - tag: consistency
33 |             - tag: score
--------------------------------------------------------------------------------
/ansible/playbooks/zbx_autoregistration.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Configuration Zabbix autoregistration
3 |   hosts: localhost
4 |   gather_facts: false
5 |   vars_files:
6 |     - ../config/config_staging.yaml
7 |   vars:
8 |     ansible_network_os: community.zabbix.zabbix
9 |     ansible_connection: httpapi
10 |     ansible_httpapi_port: 8888
11 |     ansible_httpapi_use_ssl: false
12 |     ansible_httpapi_validate_certs: false
13 |     ansible_zabbix_url_path: ""
14 |     ansible_host: localhost
15 | 
16 |   tasks:
17 |     - name: Import Zabbix Credentials
18 |       ansible.builtin.import_tasks: ../config/config_zabbix.yaml
19 | 
20 |     - name: Configure autoregistration
21 |       delegate_to: localhost
22 |       community.zabbix.zabbix_action:
23 |         name: "EIDA nodes autoregistration"
24 |         event_source: auto_registration
25 |         status: enabled
26 |         esc_period: 60
27 |         operations:
28 |           - type: add_host
29 |           - type: add_to_host_group
30 |             host_groups:
31 |               - "Discovered hosts"
32 |           - type: link_to_template
33 |             templates:
34 |               - "Template discovery"
35 |               - "Linux by Zabbix agent"
36 |           - type: enable_host
37 | 
--------------------------------------------------------------------------------
/ansible/playbooks/zbx_deployment.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Select environment
3 |   hosts: localhost
4 |   tasks:
5 |     - name: Prompt for environment selection
6 |       ansible.builtin.pause:
7 |         prompt: "Select environment (prod/staging)"
8 |       register: environment_input
9 | 
10 |     - name: Validate environment input
11 |       ansible.builtin.fail:
12 |         msg: "Invalid environment. Please choose 'prod' or 'staging'"
13 |       when: environment_input.user_input not in ['prod', 'staging']
14 | 
15 |     - name: Set config file path
16 |       ansible.builtin.set_fact:
17 |         config_file: "../config/config_{{ environment_input.user_input }}.yaml"
18 | 
19 |     - name: Store environment for other playbooks
20 |       ansible.builtin.set_fact:
21 |         selected_environment: "{{ environment_input.user_input }}"
22 |         cacheable: True
23 | 
24 | - name: Import templates
25 |   import_playbook: zbx_templates.yaml
26 | 
27 | - name: Deploying hosts and hosts groups
28 |   import_playbook: zbx_hosts.yaml
29 | 
30 | - name: Activate Media type
31 |   import_playbook: zbx_media.yaml
32 | 
33 | - name: Create users
34 |   import_playbook: zbx_users.yaml
35 | 
36 | - name: Configuration Zabbix Triggers
37 |   import_playbook: zbx_trigger_action.yaml
--------------------------------------------------------------------------------
/ansible/playbooks/zbx_trigger_action.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Configuration Zabbix Trigger action
3 |   hosts: localhost
4 |   gather_facts: false
5 |   vars_files:
6 |     - "../config/config_{{ hostvars['localhost']['selected_environment'] | default('staging') }}.yaml"
7 |     - ../files/eida_nodes.yaml
8 |   vars:
9 |     ansible_network_os: community.zabbix.zabbix
10 |     ansible_connection: httpapi
11 |     ansible_httpapi_port: 8888
12 |     ansible_httpapi_use_ssl: false
13 |     ansible_httpapi_validate_certs: false
14 |     ansible_zabbix_url_path: ""
15 |     ansible_host: localhost
16 | 
17 |   tasks:
18 |     - name: Import Zabbix Credentials
19 |       ansible.builtin.import_tasks: ../config/config_zabbix.yaml
20 | 
21 |     - name: Configure Trigger action
22 |       community.zabbix.zabbix_action:
23 |         name: "Reports EIDA problems"
24 |         event_source: trigger
25 |         state: present
26 |         status: disabled
27 |         esc_period: 1h
28 |         eval_type: andor
29 |         conditions:
30 |           - type: host_template
31 |             operator: equals
32 |             value: "Template webservices"
33 |         operations:
34 |           - type: send_message
35 |             send_to_groups: "{{ eida_nodes }}"
36 |             media_type: "Email (HTML) EIDA"
37 |         recovery_operations:
38 |           - type: send_message
39 |             send_to_groups: "{{ eida_nodes }}"
40 |             media_type: "Email (HTML) EIDA"
41 |         update_operations:
42 |           - type: send_message
43 |             send_to_groups: "{{ eida_nodes }}"
44 |             media_type: "Email (HTML) EIDA"
45 |         pause_symptoms: true
46 |         notify_if_canceled: true
47 | 
--------------------------------------------------------------------------------
/scripts/csv_to_json.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import json
3 | import argparse
4 | from pathlib import Path
5 | 
6 | def csv_to_json(csv_file_path, json_file_path=None, encoding='utf-8'):
7 |     """
8 |     converts a csv file to json format
9 | 
10 |     args:
11 |         csv_file_path (str): path to the csv file
12 |         json_file_path (str, optional): path to the output json file
13 |         encoding (str): file encoding (default 'utf-8')
14 |     """
15 | 
16 |     # if no output file is specified, use the same name with .json extension
17 |     if json_file_path is None:
18 |         csv_path = Path(csv_file_path)
19 |         json_file_path = csv_path.with_suffix('.json')
20 | 
21 |     data = []
22 | 
23 |     try:
24 |         with open(csv_file_path, 'r', encoding=encoding) as csv_file:
25 |             csv_reader = csv.DictReader(csv_file)
26 | 
27 |             # convert each row to dictionary
28 |             for row in csv_reader:
29 |                 data.append(row)
30 | 
31 |         # write data to json file
32 |         with open(json_file_path, 'w', encoding=encoding) as json_file:
33 |             json.dump(data, json_file, indent=2, ensure_ascii=False)
34 | 
35 |         print(f"conversion successful: {csv_file_path} -> {json_file_path}")
36 |         print(f"{len(data)} rows converted")
37 | 
38 |     except FileNotFoundError:
39 |         print(f"error: file {csv_file_path} does not exist")
40 |     except Exception as e:
41 |         print(f"error during conversion: {str(e)}")
42 | 
43 | def main():
44 |     parser = argparse.ArgumentParser(description='converts a csv file to json')
45 |     parser.add_argument('csv_file', help='path to the csv file to convert')
46 |     parser.add_argument('-o', '--output', help='path to the output json file')
47 |     parser.add_argument('-e', '--encoding', default='utf-8', help='file encoding (default: utf-8)')
48 | 
49 |     args = parser.parse_args()
50 | 
51 |     csv_to_json(args.csv_file, args.output, args.encoding)
52 | 
53 | if __name__ == "__main__":
54 |     main()
--------------------------------------------------------------------------------
/triggers.md:
--------------------------------------------------------------------------------
1 | # Zabbix triggers description
2 | #### Zabbix has 6 levels of severity :
3 | - Not classified
4 | - Information
5 | - Warning
6 | - Average
7 | - High
8 | - Disaster
9 | 
10 | # Zabbix triggers definition
11 | ## Webservices
12 | ### Availability, Dataselect, Station, WFCatalog
13 | ### | Step 1 : get documentation
14 | 
15 | `Average` : response http code **204** *(no content)*, requires http code **200** *(OK)*
16 | 
17 | `Average` : response http code **403** *(forbidden)*
18 | 
19 | `Average` : response http code **404** *(not found)*
20 | 
21 | `High` : service is **down** or **not responding** since **15 min**
22 | 
23 | `Disaster` : service is **down** or **not responding** since **1 hour**
24 | 
25 | ### | Step 2 : simple request
26 | 
27 | `Average` : response http code **204** *(no content)*, requires http code **200** *(OK)*
28 | 
29 | `Average` : response http code **403** *(forbidden)*
30 | 
31 | `Average` : response http code **404** *(not found)*
32 | 
33 | ### | Step 3 : application.wadl
34 | 
35 | `High` : content **not found** "content-type: application/xml" on **https://{#EIDA_WS_ENDPOINT}/fdsnws/availability/1/application.wadl**
36 | 
37 | ### Present in central EIDA routing
38 | #### | Step 1 : EIDA routing information
39 | 
40 | `Warning` : not present in query
41 | 
42 | `Average` : response http code **204** *(no content)*, requires http code **200** *(OK)*
43 | 
44 | `Average` : response http code **403** *(forbidden)*
45 | 
46 | `Average` : response http code **404** *(not found)*
47 | 
48 | ### Routing information published at node
49 | #### | Step 1 : EIDA routing XML file
50 | `Average` : route network **{#EIDA_NETWORK}** *(not found)* on **https://{#EIDA_WS_ENDPOINT}/fdsnws/routing/1/query?service=dataselect&network={#EIDA_NETWORK}**
51 | 
52 | ## Certificates
53 | 
54 | `Information` : Fingerprint has **changed**
55 | 
56 | `Warning` : SSL certificate expires in **1 day**
57 | 
58 | `Average` : SSL certificate expires in **2 weeks**
59 | 
60 | `High` : SSL certificate is **invalid**
61 | 
62 | `Disaster` : SSL certificate has **expired**
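63 | 
64 | As an illustration, the expiry warning above corresponds to a Zabbix trigger expression of roughly this form (a sketch only: the `{$CERT.EXPIRY.WARN}` macro is set to `14` days in the discovery template, but the item key shown here is illustrative, not the exported definition):
65 | 
66 | ```yaml
67 | triggers:
68 |   - name: 'SSL certificate expires soon'
69 |     expression: '(last(/Website certificate by Zabbix agent 2/cert.not_after) - now()) / 86400 < {$CERT.EXPIRY.WARN}'
70 |     priority: AVERAGE
71 | ```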
72 | 
73 | ## Other TODO
--------------------------------------------------------------------------------
/ansible/playbooks/deploying_agents.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Deploy Zabbix agents in k8s with Helm
3 |   hosts: localhost
4 |   gather_facts: false
5 |   vars_files:
6 |     - ../config/config_staging.yaml
7 |   vars:
8 |     k8s_namespace: eida-monitoring
9 |     chart_dir: ../../oculus-zbx-agent
10 |     deployments_dir: ../../oculus-zbx-agent-deployments
11 |     ansible_python_interpreter: /usr/bin/python
12 | 
13 |   tasks:
14 |     - name: Deploy all Zabbix agents
15 |       ansible.builtin.shell: |
16 |         for f in $(find {{ deployments_dir }} -type f); do
17 |           name=$(basename $f|cut -f1 -d'.')
18 |           echo $name
19 |           echo $f
20 |           helm upgrade -i $name {{ chart_dir }} --set-file zbx_lld=$f -n {{ k8s_namespace }}
21 |         done
22 |       args:
23 |         chdir: "{{ playbook_dir }}"
24 | 
25 |     # - name: List nodes
26 |     #   ansible.builtin.find:
27 |     #     paths: "{{ deployments_dir }}"
28 |     #     file_type: file
29 |     #   register: lld_files
30 | 
31 |     # - name: Show nodes
32 |     #   ansible.builtin.debug:
33 |     #     msg: "{{ item | basename | splitext | first }}"
34 |     #   loop: "{{ lld_files.files | map(attribute='path') | list }}"
35 | 
36 |     # - name: Read LLD file content
37 |     #   ansible.builtin.slurp:
38 |     #     src: "{{ lld_file }}"
39 |     #   register: lld_content
40 |     #   loop: "{{ lld_files.files | map(attribute='path') | list }}"
41 |     #   loop_control:
42 |     #     loop_var: lld_file
43 | 
44 |     # - name: Create k8s namespace
45 |     #   kubernetes.core.k8s:
46 |     #     api_version: v1
47 |     #     kind: Namespace
48 |     #     name: "{{ k8s_namespace }}"
49 |     #     state: present
50 | 
51 |     # - name: Deploy k8s Zabbix agents
52 |     #   kubernetes.core.helm:
53 |     #     name: "{{ item.0 | basename | splitext | first }}"
54 |     #     chart_ref: "{{ chart_dir }}"
55 |     #     namespace: "{{ k8s_namespace }}"
56 |     #     values:
57 |     #       zbx_lld: "{{ item.1.content | b64decode }}"
58 |     #     wait: true
59 |     #     state: present
60 |     #   with_together:
61 |     #     - "{{ lld_files.files | map(attribute='path') | list }}"
62 |     #     - "{{ lld_content.results }}"
--------------------------------------------------------------------------------
/scripts/generate_lld.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env -S uv run
2 | # /// script
3 | # requires-python = ">=3.12"
4 | # dependencies = [
5 | #     "pyyaml",
6 | # ]
7 | # ///
8 | import yaml
9 | import json
10 | import sys
11 | import datetime
12 | 
13 | class DateTimeEncoder(json.JSONEncoder):
14 |     def default(self, obj):
15 |         if isinstance(obj, datetime.datetime):
16 |             return obj.isoformat()
17 |         return json.JSONEncoder.default(self, obj)
18 | 
19 | def flatten_yaml(data, parent_key='', sep='_'):
20 |     """
21 |     Recursively flattens a nested dictionary, prefixing keys with a parent key and converting them to uppercase
22 | 
23 |     Args:
24 |         data (dict): The dictionary to flatten
25 |         parent_key (str): The base key to prefix
26 |         sep (str): The separator between keys
27 | 
28 |     Returns:
29 |         dict: A flattened dictionary with prefixed keys
30 |     """
31 |     items = []
32 |     for key, value in data.items():
33 |         new_key = f"{parent_key}{sep}{key}".upper() if parent_key else key.upper()
34 |         if isinstance(value, dict):
35 |             items.extend(flatten_yaml(value, new_key, sep=sep).items())
36 |         else:
37 |             items.append((new_key, value))
38 |     return dict(items)
39 | 
40 | def generate_lld(yaml_file):
41 |     """
42 |     Generates an LLD JSON output from a YAML file structure and prints it to the console
43 | 
44 |     Args:
45 |         yaml_file (str): The path to the input YAML file
46 |     """
47 |     # Load the YAML file
48 |     with open(yaml_file, 'r') as yf:
49 |         data = yaml.load(yf, Loader=yaml.BaseLoader)
50 | 
51 |     # Flatten the YAML data
52 |     flattened_data = flatten_yaml(data)
53 | 
54 |     # Create the LLD data with the desired format
55 |     lld_data = {f"{{#{key}}}": f"{value}" for key, value in flattened_data.items()}
56 | 
57 |     # Print the LLD data as JSON to the console
58 |     print(json.dumps([lld_data], indent=2, cls=DateTimeEncoder))
59 | 
60 | if __name__ == "__main__":
61 |     if len(sys.argv) != 2:
62 |         print("Usage: python generate_lld.py <input_yaml_file>")
63 |         sys.exit(1)
64 | 
65 |     input_yaml_file = sys.argv[1]
66 | 
67 |     generate_lld(input_yaml_file)
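68 | 
69 | # Example (illustrative): for an input file containing
70 | #   node: IGN
71 | #   onlineCheck:
72 | #     net: ES
73 | # the script prints the LLD JSON (shown compacted here):
74 | #   [{"{#NODE}": "IGN", "{#ONLINECHECK_NET}": "ES"}]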
--------------------------------------------------------------------------------
/eida_nodes/ign.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | node: IGN
3 | endpoint: fdsnws.sismologia.ign.es
4 | routingFile:
5 | onlineCheck:
6 |   net: ES
7 |   sta: EXVI3
8 |   loc: --
9 |   cha: HNZ
10 |   start: "2025-06-25T23:00:00"
11 |   end: "2025-06-25T23:00:10"
12 | requirements:
13 |   network: { biggest_network_code }
14 | perfCheck:
15 |   - webservice: dataselect
16 |     scenario: 9streams
17 |     net:
18 |     sta:
19 |     loc:
20 |     cha:
21 |     start:
22 |     end:
23 |   - webservice: dataselect
24 |     scenario: 54streams
25 |     net:
26 |     sta:
27 |     loc:
28 |     cha:
29 |     start:
30 |     end:
31 |   - webservice: dataselect
32 |     scenario: 320streams
33 |     net:
34 |     sta:
35 |     loc:
36 |     cha:
37 |     start:
38 |     end:
39 |   - webservice: dataselect
40 |     scenario: 1stream15days
41 |     net:
42 |     sta:
43 |     loc:
44 |     cha:
45 |     start:
46 |     end:
47 |   - webservice: wfcatalog
48 |     scenario: 9streams
49 |     net:
50 |     sta:
51 |     loc:
52 |     cha:
53 |     start:
54 |     end:
55 |   - webservice: wfcatalog
56 |     scenario: 54streams
57 |     net:
58 |     sta:
59 |     loc:
60 |     cha:
61 |     start:
62 |     end:
63 |   - webservice: wfcatalog
64 |     scenario: 320streams
65 |     net:
66 |     sta:
67 |     loc:
68 |     cha:
69 |     start:
70 |     end:
71 |   - webservice: wfcatalog
72 |     scenario: 1stream15days
73 |     net:
74 |     sta:
75 |     loc:
76 |     cha:
77 |     start:
78 |     end:
79 |   - webservice: availability
80 |     scenario: 9streams
81 |     net:
82 |     sta:
83 |     loc:
84 |     cha:
85 |     start:
86 |     end:
87 |   - webservice: availability
88 |     scenario: 54streams
89 |     net:
90 |     sta:
91 |     loc:
92 |     cha:
93 |     start:
94 |     end:
95 |   - webservice: availability
96 |     scenario: 320streams
97 |     net:
98 |     sta:
99 |     loc:
100 |     cha:
101 |     start:
102 |     end:
103 |   - webservice: availability
104 |     scenario: 1stream15days
105 |     net:
106 |     sta:
107 |     loc:
108 |     cha:
109 |     start:
110 |     end:
111 |   - webservice: station
112 |     scenario: 9streams
113 |     net:
114 |     sta:
115 |     loc:
116 |     cha:
117 |     start:
118 |     end:
119 |   - webservice: station
120 |     scenario: 54streams
121 |     net:
122 |     sta:
123 |     loc:
124 |     cha:
125 |     start:
126 |     end:
127 |   - webservice: station
128 |     scenario: 320streams
129 |     net:
130 |     sta:
131 |     loc:
132 |     cha:
133 |     start:
134 |     end:
135 | 
--------------------------------------------------------------------------------
/zabbix_server/helm_values/values.yaml:
--------------------------------------------------------------------------------
1 | zabbixImageTag: alpine-7.0.18
2 | zabbixProxy:
3 |   enabled: false
4 | # Disable the creation of a postgresql container on k8s
5 | postgresql:
6 |   enabled: false
7 | postgresAccess:
8 |   # Disable the use of Kubernetes secrets
9 |   useUnifiedSecret: false
10 |   # Remove the references to a secret
11 |   unifiedSecretName: ""
12 |   unifiedSecretAutoCreate: false
13 |   host: resif-pgpreprod.u-ga.fr
14 |   port: 5432
15 |   user: oculus
16 |   password: ENC[AES256_GCM,data:DY1m0yZrqbH/p3he8MXySwGM5TM=,iv:UtDOuoLOHEI7+1JazP9IMj1Wfi6a2fNSn3iWlHcdfe8=,tag:4+oOhZcyZ7oKRckGG1puTw==,type:str]
17 |   database: oculus_zabbix
18 | zabbixServer:
19 |   enabled: true
20 |   zabbixServerHA:
21 |     enable: false
22 |   service:
23 |     type: ClusterIP
24 |     port: 10051
25 | zabbixWeb:
26 |   enabled: true
27 |   ZBX_SERVER_HOST: zbx-server
28 |   ZBX_SERVER_PORT: 10051
29 |   service:
30 |     type: ClusterIP
31 |     port: 8888
32 |   ingress:
33 |     enabled: true
34 |     hosts:
35 |       - host: zabbix.local
36 |         paths:
37 |           - path: /
38 |             pathType: Prefix
39 |     ingressClassName: nginx
40 |     pathType: Prefix
41 | # fullnameOverride: oculus
42 | sops:
43 |   kms: []
44 |   gcp_kms: []
45 |   azure_kv: []
46 |   hc_vault: []
47 |   age: []
48 |   lastmodified: "2025-02-14T10:57:10Z"
49 |   mac: ENC[AES256_GCM,data:jG7OpoSJelnPs9eHzXMJJJ6er45wm1gVnHBDvptFK/w0ZPNB7FIHNqqhl0/y9KKLmvAuhdP1zARn6fsjZ4MbPXNP3r5KNROtobuqy8cyRqfdS/e/byVF5xTdbWomwXkl+mTsxjt8cte8VXQYUvAe2tpM27+VyvubGwU2VoVC9uE=,iv:lrK+uj3KXLDp38asEBkMXFU71rRylhzCWb6/oBVg8L8=,tag:2PCoa7KVQ2+cJ2SZvV3d9A==,type:str]
50 |   pgp:
51 |     - created_at: "2025-02-14T10:57:10Z"
52 |       enc: |-
53 |         -----BEGIN PGP MESSAGE-----
54 | 
55 |         hQEMA23uPS/3Y9M2AQgAzB1X9f9N2YSwP1BvkWG6795v6PZMD+N1ZzhHGamBT2Tw
56 |         HEBHe9Y/L2wDluNX524iePhrlGSVuidWxGh6kPlyWfRcSs4VLlyTbdo5AIqY6iSd
57 |         yEGQcF5eg6G46pLDE93y6ul9f3Ki+Ph1kaJ9SADMm1haE/R5Z0BizyipFymUuToK
58 |         HXcyDK0f6a6PPWwFZ6iY9nBaHEfEt+C+No1cJaYmJlq3/koiHnJ7lx5L4Mmi7N1M
59 |         eCrX9l0nfMNAuM8oQ62/gYN/tNqk9G1CG0R3LajNAw9ysWH2D0xiABWvCAUv81VU
60 |         6wdHkEOrpzDbhlecOsQ2BwfyPSwZM2lbIWmWuajxn9JeAXOMgxc8wxOH4uSnRToy
61 |         yioGUTROHhdTxjV72zFK61bbL9G5KjcGq4HwXzAGLKm0/5pUJP6tHWCBHLb9oZjs
62 |         QAUGrZx3DP/UYmwzX+AF8M8cMAuVHUX+AI2nXkrQvg==
63 |         =kwwW
64 |         -----END PGP MESSAGE-----
65 |       fp: C83ECA1E0B9D719C7FA2470F18E4F5A58801E669
66 |   encrypted_regex: (password)
67 |   mac_only_encrypted: true
68 |   version: 3.9.1
69 | 
--------------------------------------------------------------------------------
/zabbix_server/helm_values/production.yaml:
--------------------------------------------------------------------------------
1 | zabbixImageTag: alpine-7.0.18
2 | zabbixProxy:
3 |   enabled: false
4 | # Disable the creation of a postgresql container on k8s
5 | postgresql:
6 |   enabled: false
7 | postgresAccess:
8 |   # Disable the use of Kubernetes secrets
9 |   useUnifiedSecret: false
10 |   # Remove the references to a secret
11 |   unifiedSecretName: ""
12 |   unifiedSecretAutoCreate: false
13 |   host: resif-pgprod.u-ga.fr
14 |   port: "5432"
15 |   user: oculus
16 |   password: ENC[AES256_GCM,data:2aN4Ymwmzf8MU16hOhI3iiZoNlM=,iv:N4YUfesaC/rB2RvMVdumP7EwIoh1bazScQQyO5cnZZM=,tag:kKPrVZj0KUO2C2qCXV3ixA==,type:str]
17 |   database: oculus_zabbix
18 | zabbixServer:
19 |   enabled: true
20 |   zabbixServerHA:
21 |     enable: false
22 |   service:
23 |     type: ClusterIP
24 |     port: 10051
25 | zabbixWeb:
26 |   enabled: true
27 |   ZBX_SERVER_HOST: zabbix-server
28 |   ZBX_SERVER_PORT: 10051
29 |   service:
30 |     type: ClusterIP
31 |     port: 8888
32 |   ingress:
33 |     annotations:
34 |       nginx.ingress.kubernetes.io/rewrite-target: /$1
35 |     enabled: true
36 |     hosts:
37 |       - host: ws.resif.fr
38 |         paths:
39 |           - path: /oculus-zbx/(.*)
40 |             pathType: ImplementationSpecific
41 |     ingressClassName: nginx
42 |     pathType: Prefix
43 | # fullnameOverride: oculus
44 | sops:
45 |   kms: []
46 |   gcp_kms: []
47 |   azure_kv: []
48 |   hc_vault: []
49 |   age: []
50 |   lastmodified: "2025-02-14T11:07:42Z"
51 |   mac: ENC[AES256_GCM,data:fA++13nUzTeXfOPrjaIDi0WQDJZ5Lwb1+FXauimUc2o4a1i8Yly2HOmx7jM4iERRcP4UP/L3EhB457Fhl3IhqmyszHW5jn5im78BB5aG2/FeZL5/0R+Skefs6F8nIA2dDTB56Akfsf+VlPAIDDUSkhX2OqGOW8YB6/Tf8pLwAnQ=,iv:XEFbRXSFMOIyhPyF5BwRN4RZeU/pLXydjilLsvA5278=,tag:KF0uVNqqsjgTQBLLagn2gw==,type:str]
52 |   pgp:
53 |     - created_at: "2025-02-14T11:07:42Z"
54 |       enc: |-
55 |         -----BEGIN PGP MESSAGE-----
56 | 
57 |         hQEMA23uPS/3Y9M2AQf/SkfgSnnzD8aZ5ahoasog5EXjdiEuLvPyZlzARDmgcBIL
58 |         1GNWXicYP9Bvg8pjLBeKnuyupbDhAThhbb178rz6v3+UgtOqIqcO6XuIBk2LSGIU
59 |         TTahA3vf7nB2qiurIdeeFjtnppDy3dZvmx2ebvjQE6Zldp/+UCz8vA9uk/nDzH9y
60 |         ZI5UQ60PvIeNSc1ImWlN8SE7/fDjyx9qzgm4mZ+Cjc0Atgl2QPWkNJZqEH8pDLR3
61 |         6YpIajlpO9sJWoco19CzBYMer+bwysWFOQUuTcUUNRRgCFajNcR6qzKvWuljJ4KR
62 |         tl+LhcDdcm/oK77CSZwNzaJKtGYiKD1RogSstar0UdJeATJ0LIWUIjLGDmpD1UgL
63 |         LNm38YVKdyxL33ZdvgZ+g4jQv6j7e8g0yaaV/rER5IT67KTJMPRdFVLELz5l6OfB
64 |         YDgXRhpKYlYHuHMqw2iy9+KX1r0AnygHEoohayTyuA==
65 |         =FaIX
66 |         -----END PGP MESSAGE-----
67 |       fp: C83ECA1E0B9D719C7FA2470F18E4F5A58801E669
68 |   encrypted_regex: (password)
69 |   mac_only_encrypted: true
70 |   version: 3.9.1
71 | 
--------------------------------------------------------------------------------
/ansible/config/config_prod.yaml:
--------------------------------------------------------------------------------
1 | ansible_httpapi_pass: ENC[AES256_GCM,data:l7g/Ju5akNfoy+gH,iv:2mNDY7lyD4zjfNcC64P+IjhiCzhj2tjU/85nq+paefE=,tag:+eeOpnnoLlNvLzSX9p/buA==,type:str]
2 | ansible_zabbix_token: ENC[AES256_GCM,data:6UD3S2oLxYTAXdshL1p+lD6FOUtsDFiSWqmSKXrhv1xPhb0s+GTUb1wglCnQsWePzhHID6Czg+A5WndgNy6aGA==,iv:3ZtpTPciBLo9gV8NFEYQtprrl/Jk2XsdKFAG7O62rb4=,tag:t4reVAT7ujOGuQTShpX0kw==,type:str]
3 | ansible_grafana_token: ENC[AES256_GCM,data:JLmwXqo3J/6dY1DDgu7nAecP9QEpVxbYjGYjsjL3g9hj29m2HSf/8bWHQPE55w==,iv:jceTNrDcDlUB9/ejLogMDKFUzk7ih0Qgt2ePfQrsQrQ=,tag:zl/I+tRpm/JJ7N9UuZfB5w==,type:str]
4 | zabbix_grafana_token: ENC[AES256_GCM,data:g72cP0x/0jE4tlX+xAEdjqCkaHZcj8rjuejHahaAZVy6mlnflxrXcC3n4+lWo4Km+uIai14qb1N7S4eSlv54eQ==,iv:kjWEp366qPURF0oSCbtB04qLBZXZr2PK6MJJw08wsSM=,tag:jEGktblV8C1BaTi04ydjeA==,type:str]
5 | zabbix_grafana_pass: ENC[AES256_GCM,data:gL7f8f5m9cqpfIoc,iv:uu0i+jfsO72BRMKh0twtz0g1U/jwG/nOztgWz67kahI=,tag:ouKfxolHlTU1leAm70Qdtw==,type:str]
6 | zabbix_user_pass: 
ENC[AES256_GCM,data:buQt5/gEh8qqVUeG,iv:xU9F96cqaTjdwWi0wenxsxXTr8Wr1DMaQ5nzri1xl4w=,tag:mFpYE7TaT3Qh6E9zJF21kQ==,type:str] 7 | sops: 8 | kms: [] 9 | gcp_kms: [] 10 | azure_kv: [] 11 | hc_vault: [] 12 | age: [] 13 | lastmodified: "2025-12-01T16:26:12Z" 14 | mac: ENC[AES256_GCM,data:ZIrvb6trFgJQgI/L+E46RpOECZIPOtb3MDEgenOJ4Z2uLbLpxKmh6540drbU8cvOUg5pnshzMFpvAAPZv5RRPej1Id3YK6F1eGW/77kqze+kpRl6kP1By73NKQYbiWWJPnbfsCR3qk+Nb2YWJioPeNpuSJn6TSBT9tBi4TgQ8Pg=,iv:gJnRn74JGasAmz00p1EzlypjZ+zcZK75zqgwd5QNeuU=,tag:JXBSPbaqgq+vbSgUhmeUYw==,type:str] 15 | pgp: 16 | - created_at: "2025-12-01T16:26:12Z" 17 | enc: |- 18 | -----BEGIN PGP MESSAGE----- 19 | 20 | hQEMA23uPS/3Y9M2AQf+P7zkaBONMy51c4+dmp9t4aRKz48FByFuAW7+k5tfxpEc 21 | vPKoGlKjUX9Go3RmqO4TBFqqGSTNNQ0yA/ePy93xeg72MILQkcmQkyvDXWPlaVFY 22 | +5sLYQjNkeeT/uBlwLquMDY6J0+LNPjQeesqpFIQfujhbG5/qNOeX+uKnTPRp34j 23 | E0zk1xf7m9NhHAmo1ooj6Q3HPSsNqik90hMThv6aKVYVWJx87+sfpRvy1mthLnk+ 24 | KELPNZ+ssfxR+Brp4obCLoEjUvIi4Q56p/4oXmoxmXUWzRaFaZE9U0Tx6D+m4uMB 25 | Wyq/zjLOUY5zUJTQ0knROC/R6y31DgqvT4+gC2WXfdJeAXSzVsPwEVeiSWThO5ov 26 | M4hGJpLMNuBwcwKk6Haz5RL7wu/jFFvG5U0HReIvyD1fGqZa9i5kw87cKthXz7Ll 27 | Nje9lu2Oib7iZPHZ5pT15pCff3KtR9EHD6ZP9a2WAA== 28 | =+aoI 29 | -----END PGP MESSAGE----- 30 | fp: C83ECA1E0B9D719C7FA2470F18E4F5A58801E669 31 | encrypted_regex: (password|ansible_httpapi_pass|ansible_zabbix_token|ansible_grafana_token|zabbix_grafana_token|zabbix_grafana_pass|zabbix_user_pass|adminPassword) 32 | mac_only_encrypted: true 33 | version: 3.9.2 34 | -------------------------------------------------------------------------------- /ansible/config/config_staging.yaml: -------------------------------------------------------------------------------- 1 | ansible_httpapi_pass: ENC[AES256_GCM,data:h0CnWwo9kRBNU0RQ,iv:GfiD0prJ8cIFHzqPy77gzAGjklKm62tdpD09Z6Ob5Is=,tag:z+4kfmWmR0OdU/UzXMCLPA==,type:str] 2 | ansible_zabbix_token: ENC[AES256_GCM,data:/QiOWeQeEAuax1NoOco9JGFrETPjheSBu2aJk6+B/dGReKRpV+ovAlNIFcrg6fRiwMbut49SBnLw28xO5aOJUw==,iv:t2HM6ge0w6JyGdgrz9VSmfGLEa1JElqFZ1bcGMIX+xk=,tag:ssziqMkVwQqtwY2wxnntLw==,type:str] 3 | ansible_grafana_token: ENC[AES256_GCM,data:BAHBjK1mdm96dEcCPPoooZBWZg6uTfcQSWheeD7+xm1xSF/sdu+mWhUogrPfPg==,iv:VGU8kndV9uLRyHhiKVovOtbsdb6xrgrNLBJKuBuUVlc=,tag:+PAfx/5Pcs+AzYihkN1z6w==,type:str] 4 | zabbix_grafana_token: ENC[AES256_GCM,data:y9Stn53hUR8vczHQytSnDVndM8RGWsM0uppxlQgfJyZ5USu0i5QHds7QPLT7aNVPCTwHbBnV3X12ulv4qXCsPA==,iv:8PZ5H/81e/17ZZErUMEOnWK7tyfwbTV3VrH7bfA/ygk=,tag:0eXV/wQwFl466n/ie9hsHw==,type:str] 5 | zabbix_grafana_pass: ENC[AES256_GCM,data:C36242fKVuNr/mmQ,iv:pZmUxkDjjmHp//+xLhAF8Zei0ErvLAhYsqKth+IWgKA=,tag:nL0fhU/nA+sL8gd2LO6M8Q==,type:str] 6 | zabbix_user_pass: ENC[AES256_GCM,data:fK7EdgY7+9jX9woh,iv:AGmfRoAcT7ZoxiXZTA/EGmWJkeSE/hdVOd2tXgSrcoQ=,tag:Ed+L+SrDj1ICC/CL7H9rAQ==,type:str] 7 | sops: 8 | kms: [] 9 | gcp_kms: [] 10 | azure_kv: [] 11 | hc_vault: [] 12 | age: [] 13 | lastmodified: "2025-12-01T16:26:35Z" 14 | mac: ENC[AES256_GCM,data:5LJ3ggv1zx3dZj7hB02QO0CsAPMgmC1UB2PeO+w2uNRpo+yzWubF6eT2yZtWEEvROv5hGlHsPkdymF40bpN/EUsKIJyOo60qbzkXkFembEE8Zb7NLi5dWnJYwl7Ya9FvuKLfmQkFGbUJkw9e2fH8hU85911aa5PaDJxRtPoJDaY=,iv:7xWtO0Vuz2LWUClwD0p3rHcaD4SEDlTxXBtL6BLuruQ=,tag:0gulh+I/EvFzFuFgwOC3Mw==,type:str] 15 | pgp: 16 | - created_at: "2025-12-01T16:26:35Z" 17 | enc: |- 18 | -----BEGIN PGP MESSAGE----- 19 | 20 | hQEMA23uPS/3Y9M2AQgAsFwTqb4vbHfCYnlujYTC6sHmBFX5jeXTFbZYTjmuA5Xn 21 | aJTuUFIHzfKt+pU5SZ2RtB2NupxOeuScAzZVKVhgT7ESsEb3v9EYGatQ0+4/yiq9 22 | jh2o1hNS2PM/omSk3Cr3diFFnF3+a23IZgDkTQSH0xJDlOy60qLFkSclann4XBc+ 23 | 
rgGOlq79N2LvXxCvI6Xzzyll8yeTMMxiA058sPEq7F1iV9K4pJ/bHxxZ1DDwVq5u
24 |         4vjEcMApdG1mM4snBPW2mM4noDVizRB+xr0/+P5dcWFOU4MBo/M7P9QfhP635fxo
25 |         kk3n7+y9131HWGIeSCU+nvid8n38MB4xhL+4VCl6KNJeAfM/Csy1+N/xylQ+2BJa
26 |         tzICHybo/vcC6yMxXyjUd1jZiY4KScV9DvZuuWYbHva1ZZl1l3fzJybXst9eQIEY
27 |         gyu0RpEvkDQgGcH2YxMnjut9WqIBsiMXovvn/442cA==
28 |         =9SEt
29 |         -----END PGP MESSAGE-----
30 |       fp: C83ECA1E0B9D719C7FA2470F18E4F5A58801E669
31 |   encrypted_regex: (password|ansible_httpapi_pass|ansible_zabbix_token|ansible_grafana_token|zabbix_grafana_token|zabbix_grafana_pass|zabbix_user_pass|adminPassword)
32 |   mac_only_encrypted: true
33 |   version: 3.9.2
34 | 
--------------------------------------------------------------------------------
/scripts/query_to_yaml.py:
--------------------------------------------------------------------------------
1 | import yaml
2 | from urllib.parse import parse_qs
3 | import argparse
4 | from collections import OrderedDict
5 | 
6 | # Configure YAML to handle OrderedDict properly
7 | def represent_ordereddict(dumper, data):
8 |     return dumper.represent_dict(data.items())
9 | 
10 | yaml.add_representer(OrderedDict, represent_ordereddict)
11 | 
12 | def query_to_yaml(query):
13 |     # Extract the query parameters
14 |     query_params = parse_qs(query)
15 |     # Convert lists to single values if they contain only one item
16 |     query_params = {key: value[0] if len(value) == 1 else value for key, value in query_params.items()}
17 | 
18 |     # Map shortname
19 |     key_mapping = {
20 |         "network": "net",
21 |         "station": "sta",
22 |         "channel": "cha",
23 |         "starttime": "start",
24 |         "endtime": "end",
25 |         "location": "loc"
26 |     }
27 |     # Replace keys based on the mapping
28 |     query_params = {key_mapping.get(key, key): value for key, value in query_params.items()}
29 | 
30 |     # Ensure empty fields are represented as empty strings
31 |     for key in ["net", "sta", "loc", "cha", "start", "end"]:
32 |         if key not in query_params:
33 |             query_params[key] = ""
34 | 
35 |     # Reorder keys to match the desired order
36 |     ordered_keys = ["net", "sta", "loc", "cha", "start", "end"]
37 |     ordered_query_params = OrderedDict((key, query_params[key]) for key in ordered_keys if key in query_params)
38 | 
39 |     return ordered_query_params
40 | 
41 | if __name__ == "__main__":
42 |     # Set up argument parser
43 |     parser = argparse.ArgumentParser(description="Convert query parameters to YAML format.")
44 |     parser.add_argument("-f", "--file", help="Path to a file containing multiple query strings, one per line.")
45 | 
46 |     # Parse arguments
47 |     args = parser.parse_args()
48 | 
49 |     section_order = ["9streams", "54streams", "320streams", "1stream15days"]
50 | 
51 |     if args.file:
52 |         # Read queries from the file
53 |         with open(args.file, "r") as file:
54 |             lines = file.readlines()
55 |         output = OrderedDict()
56 |         for section, line in zip(section_order, lines):
57 |             line = line.strip()
58 |             if line:  # Skip empty lines
59 |                 output[section] = query_to_yaml(line)
60 | 
61 |         # Convert the final output to YAML
62 |         yaml_output = yaml.dump(output, default_flow_style=False, allow_unicode=True)
63 |         print(yaml_output)
64 |     else:
65 |         print("Error: You must provide a file containing queries.")
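66 | 
67 | # Example (illustrative): a query line such as
68 | #   network=ES&station=EXVI3&channel=HNZ&starttime=2025-06-25T23:00:00
69 | # is emitted under its scenario name, with missing fields left empty:
70 | #   9streams:
71 | #     net: ES
72 | #     sta: EXVI3
73 | #     loc: ''
74 | #     cha: HNZ
75 | #     start: '2025-06-25T23:00:00'
76 | #     end: ''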
--------------------------------------------------------------------------------
/ansible/files/oculus_users.yaml:
--------------------------------------------------------------------------------
1 | oculus_users:
2 |   - username: user-eposfr
3 |     name: Enzo Fogliano
4 |     usrgrp: EPOSFR
5 |     email: enzo.fogliano@univ-grenoble-alpes.fr
6 |   - username: user-eposfr2
7 |     name: Jonathan Schaeffer
8 |     usrgrp: EPOSFR
9 |     email: jonathan.schaeffer@univ-grenoble-alpes.fr
10 |   - username: user-bgr
11 |     name: Goebel Björn
12 |     usrgrp: BGR
13 |     email: Bjoern.Goebel@bgr.de
14 |   - username: user-bgr2
15 |     name: Mathias Hoffmann
16 |     usrgrp: BGR
17 |     email: Mathias.Hoffmann@bgr.de
18 |   - username: user-bgs
19 |     name: John Stevenson
20 |     usrgrp: BGS
21 |     email: jostev@bgs.ac.uk
22 |   - username: user-bgs2
23 |     name: Tarun Joseph
24 |     usrgrp: BGS
25 |     email: tjos@bgs.ac.uk
26 |   - username: user-geofon
27 |     name: Andres Heinloo
28 |     usrgrp: GEOFON
29 |     email: andres@gfz-potsdam.de
30 |   - username: user-geofon2
31 |     name: Javier Quinteros
32 |     usrgrp: GEOFON
33 |     email: javier@gfz-potsdam.de
34 |   - username: user-icgc
35 |     name: Jara Jose Antonio
36 |     usrgrp: ICGC
37 |     email: JoseAntonio.Jara@icgc.cat
38 |   - username: user-ingv
39 |     name: Ivano Carluccio
40 |     usrgrp: INGV
41 |     email: ivano.carluccio@ingv.it
42 |   - username: user-ingv2
43 |     name: Massimo Fares
44 |     usrgrp: INGV
45 |     email: massimo.fares@ingv.it
46 |   - username: user-koeri
47 |     name: Tuğçe Ergün
48 |     usrgrp: KOERI
49 |     email: afacan@bogazici.edu.tr
50 |   - username: user-koeri2
51 |     name: Fatih Turhan
52 |     usrgrp: KOERI
53 |     email: fatih.turhan@bogazici.edu.tr
54 |   - username: user-lmu
55 |     name: Tobias Megies
56 |     usrgrp: LMU
57 |     email: tobias.megies@lmu.de
58 |   - username: user-lmu2
59 |     name: Joachim Wassermann
60 |     usrgrp: LMU
61 |     email: j.wassermann@lmu.de
62 |   - username: user-niep
63 |     name: Cristian Neagoe
64 |     usrgrp: NIEP
65 |     email: cristian.neagoe@infp.ro
66 |   - username: user-noa
67 |     name: Κωνσταντίνος Μπούκουρας
68 |     usrgrp: NOA
69 |     email: kbouk@noa.gr
70 |   - username: user-odc
71 |     name: RDSA AGILE
72 |     usrgrp: ODC
73 |     email: rdsa-agile@knmi.nl
74 |   - username: user-eth
75 |     name: Stefan Heimers
76 |     usrgrp: ETH
77 |     email: stefan.heimers@sed.ethz.ch
78 |   - username: user-eth2
79 |     name: Kästli Philipp
80 |     usrgrp: ETH
81 |     email: kaestli@sed.ethz.ch
82 |   - username: user-eth3
83 |     name: Roman Racine
84 |     usrgrp: ETH
85 |     email: racine@sed.ethz.ch
86 |   - username: user-uib-norsar
87 |     name: Øyvind Natvik
88 |     usrgrp: UIB-NORSAR
89 |     email: Oyvind.Natvik@uib.no
90 |   - username: user-ign
91 |     name: Barco De La Torre Jaime
92 |     usrgrp: IGN
93 |     email: jbarco@transportes.gob.es
94 |   - username: user-ign2
95 |     name: Nuño Esteban Félix
96 |     usrgrp: IGN
97 |     email: fnuno@transportes.gob.es
--------------------------------------------------------------------------------
/ansible/playbooks/zbx_media.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Activate Media type Email (HTML) EIDA
3 |   hosts: localhost
4 |   gather_facts: false
5 |   vars_files:
6 |     - "../config/config_{{ hostvars['localhost']['selected_environment'] | default('staging') }}.yaml"
7 |   vars:
8 |     ansible_network_os: community.zabbix.zabbix
9 |     ansible_connection: httpapi
10 |     ansible_httpapi_port: 8888
11 |     ansible_httpapi_use_ssl: false
12 |     ansible_httpapi_validate_certs: false
13 |     ansible_zabbix_url_path: ""
14 |     ansible_host: localhost
15 | 
16 |   tasks:
17 |     - name: Import Zabbix Credentials
18 |       ansible.builtin.import_tasks: ../config/config_zabbix.yaml
19 | 
20 |     - name: Activate Media type
21 |       delegate_to: localhost
22 |       community.zabbix.zabbix_mediatype:
23 |         name: "Email (HTML) EIDA"
24 |         type: email
25 |         smtp_server: "mailhost.u-ga.fr"
26 |         smtp_email: "ne-pas-repondre@univ-grenoble-alpes.fr"
27 |         smtp_server_port: 25
28 |         message_format: "html"
29 |         status: disabled
30 |         message_templates:
31 |           - eventsource: triggers
32 |             recovery: operations
33 |             subject: "Problem: {EVENT.NAME}"
34 |             body: |
35 |               Problem started at {EVENT.TIME} on {EVENT.DATE}<br>
36 |               Problem name: {EVENT.NAME}<br>
37 |               Host: {HOST.NAME}<br>
38 |               Description: {}<br>
39 |               Severity: {EVENT.SEVERITY}<br>
40 |               Operational data: {EVENT.OPDATA}<br>
41 |               Original problem ID: {EVENT.ID}<br>
42 |           - eventsource: triggers
43 |             recovery: recovery_operations
44 |             subject: "Resolved in {EVENT.DURATION}: {EVENT.NAME}"
45 |             body: |
46 |               Problem has been resolved at {EVENT.RECOVERY.TIME} on {EVENT.RECOVERY.DATE}<br>
47 |               Problem name: {EVENT.NAME}<br>
48 |               Problem duration: {EVENT.DURATION}<br>
49 |               Host: {HOST.NAME}<br>
50 |               Severity: {EVENT.SEVERITY}<br>
51 |               Original problem ID: {EVENT.ID}<br>
52 |           - eventsource: triggers
53 |             recovery: update_operations
54 |             subject: "Updated problem in {EVENT.AGE}: {EVENT.NAME}"
55 |             body: |
56 |               {USER.FULLNAME} {EVENT.UPDATE.ACTION} problem
57 |               at {EVENT.UPDATE.DATE} {EVENT.UPDATE.TIME}.<br>
58 |               {EVENT.UPDATE.MESSAGE}<br><br>
59 |               Current problem status: {EVENT.STATUS}<br>
60 |               Age: {EVENT.AGE}<br>
61 |               Acknowledged: {EVENT.ACK.STATUS}
62 | 
--------------------------------------------------------------------------------
/zabbix_server/templates/zbx_export_templates_discovery.yaml:
--------------------------------------------------------------------------------
1 | zabbix_export:
2 |   version: '7.0'
3 |   template_groups:
4 |     - uuid: 44950db14d8549c2a022ab3d5ba3ff96
5 |       name: Templates/EIDA
6 |   host_groups:
7 |     - uuid: 538a19975c7a43cf8a349b953d587723
8 |       name: 'EIDA nodes'
9 |   templates:
10 |     - uuid: 74f0d2bd954841d39f19a0c7d06549ee
11 |       template: 'Template discovery'
12 |       name: 'Template discovery'
13 |       description: 'Template for discovery agent/host'
14 |       groups:
15 |         - name: Templates/EIDA
16 |       discovery_rules:
17 |         - uuid: 73a97a466ed542a98a67e3e2c61af74a
18 |           name: EIDA_ENDPOINT
19 |           key: eida.lld
20 |           host_prototypes:
21 |             - uuid: 357b595cf81e44a8a70842dbd8887fdd
22 |               host: '{#ENDPOINT}'
23 |               name: '{#NODE}'
24 |               inventory_mode: AUTOMATIC
25 |               group_links:
26 |                 - group:
27 |                     name: 'EIDA nodes'
28 |               group_prototypes:
29 |                 - name: '{#NODE}'
30 |               templates:
31 |                 - name: 'Template performance checks'
32 |                 - name: 'Template webservices'
33 |                 - name: 'Website certificate by Zabbix agent 2'
34 |               macros:
35 |                 - macro: '{$CERT.EXPIRY.WARN}'
36 |                   value: '14'
37 |                   description: 'Number of days until the certificate expires.'
38 |                 - macro: '{$CERT.WEBSITE.HOSTNAME}'
39 |                   value: '{#ENDPOINT}'
40 |                 - macro: '{$ENDPOINT}'
41 |                   value: '{#ENDPOINT}'
42 |                   description: 'Node endpoint URL'
43 |                 - macro: '{$NODE}'
44 |                   value: '{#NODE}'
45 |                   description: 'Node name'
46 |                 - macro: '{$ONLINECHECK_CHA}'
47 |                   value: '{#ONLINECHECK_CHA}'
48 |                   description: 'Node channel check'
49 |                 - macro: '{$ONLINECHECK_END}'
50 |                   value: '{#ONLINECHECK_END}'
51 |                   description: 'Node end time'
52 |                 - macro: '{$ONLINECHECK_LOC}'
53 |                   value: '{#ONLINECHECK_LOC}'
54 |                   description: 'Node location check'
55 |                 - macro: '{$ONLINECHECK_NET}'
56 |                   value: '{#ONLINECHECK_NET}'
57 |                   description: 'Node network check'
58 |                 - macro: '{$ONLINECHECK_STA}'
59 |                   value: '{#ONLINECHECK_STA}'
60 |                   description: 'Node station check'
61 |                 - macro: '{$ONLINECHECK_START}'
62 |                   value: '{#ONLINECHECK_START}'
63 |                   description: 'Node start time'
--------------------------------------------------------------------------------
/ansible/playbooks/zbx_users.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Create Zabbix users
3 |   hosts: localhost
4 |   gather_facts: false
5 |   vars_files:
6 |     - "../config/config_{{ hostvars['localhost']['selected_environment'] | default('staging') }}.yaml"
7 |     - ../files/oculus_users.yaml
8 |     - ../files/eida_nodes.yaml
9 |   vars:
10 |     ansible_network_os: community.zabbix.zabbix
11 |     ansible_connection: httpapi
12 |     ansible_httpapi_port: 8888
13 |     ansible_httpapi_use_ssl: false
14 |     ansible_httpapi_validate_certs: false
15 |     ansible_zabbix_url_path: ""
16 |     ansible_host: localhost
17 | 
18 |   tasks:
19 |     - name: Import Zabbix Credentials
20 |       ansible.builtin.import_tasks: ../config/config_zabbix.yaml
21 | 
22 |     - name: Create users groups
23 |       delegate_to: localhost
24 |       loop: "{{ oculus_users }}"
25 |       community.zabbix.zabbix_usergroup:
26 |         name: "{{ item.usrgrp }}"
27 |         templategroup_rights:
28 |           - template_group: Templates/EIDA
29 |             permission: read-only
30 |         hostgroup_rights:
31 |           - host_group: "{{ item.usrgrp }}"
32 |             permission: read-only
33 |         tag_filters:
34 |           - host_group: "{{ item.usrgrp }}"
35 | 
36 |     - name: Create users
37 |       delegate_to: localhost
38 |       loop: "{{ oculus_users }}"
39 |       community.zabbix.zabbix_user:
40 |         username: "{{ item.username }}"
41 |         name: "{{ item.name }}"
42 |         usrgrps:
43 |           - Internal
44 |           - "{{ item.usrgrp }}"
45 |         passwd: "{{ zabbix_user_pass }}"
46 |         lang: en_GB
47 |         autologin: false
48 |         refresh: "30"
49 |         rows_per_page: "50"
50 |         user_medias:
51 |           - mediatype: Email (HTML) EIDA
52 |             sendto:
53 |               - "{{ item.email }}"
54 | 
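55 |     # The Jinja below builds read-only rights for the "EIDA nodes" host group
56 |     # plus one host group per node listed in eida_nodes.yaml, by rendering each
57 |     # node name into a {"host_group": ..., "permission": "read-only"} mapping.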
58 |     - name: Create API user group
59 |       delegate_to: localhost
60 |       community.zabbix.zabbix_usergroup:
61 |         name: "API-RO"
62 |         templategroup_rights:
63 |           - template_group: Templates/EIDA
64 |             permission: read-only
65 |         hostgroup_rights: "{{ [{'host_group': 'EIDA nodes', 'permission': 'read-only'}] + (eida_nodes | map('regex_replace', '^(.*)$', '{\"host_group\": \"\\1\", \"permission\": \"read-only\"}') | map('from_json') | list) }}"
66 |         tag_filters: "{{ [{'host_group': 'EIDA nodes'}] + (eida_nodes | map('community.general.dict_kv', 'host_group') | list) }}"
67 | 
68 |     - name: Create grafana user
69 |       delegate_to: localhost
70 |       community.zabbix.zabbix_user:
71 |         username: grafana
72 |         usrgrps:
73 |           - API-RO
74 |           - "No access to the frontend"
75 |         passwd: "{{ zabbix_grafana_pass }}"
76 |         lang: en_GB
77 |         autologin: false
--------------------------------------------------------------------------------
/grafana_server/dashboards/welcome.json:
--------------------------------------------------------------------------------
1 | {
2 |   "annotations": {
3 |     "list": [
4 |       {
5 |         "builtIn": 1,
6 |         "datasource": {
7 |           "type": "grafana",
8 |           "uid": "-- Grafana --"
9 |         },
10 |         "enable": true,
11 |         "hide": true,
12 |         "iconColor": "rgba(0, 211, 255, 1)",
13 |         "name": "Annotations & Alerts",
14 |         "type": "dashboard"
15 |       }
16 |     ]
17 |   },
18 |   "editable": true,
19 |   "fiscalYearStartMonth": 0,
20 |   "graphTooltip": 0,
21 |   "id": 7,
22 |   "links": [],
23 |   "panels": [
24 |     {
25 |       "fieldConfig": {
26 |         "defaults": {},
27 |         "overrides": []
28 |       },
29 |       "gridPos": {
30 |         "h": 6,
31 |         "w": 24,
32 |         "x": 0,
33 |         "y": 0
34 |       },
35 |       "id": 1,
36 |       "options": {
37 |         "code": {
38 |           "language": "plaintext",
39 |           "showLineNumbers": false,
40 |           "showMiniMap": false
41 |         },
42 |         "content": "# Welcome to EIDA Oculus\n\nFor the EIDA Technical Committee and EIDA Management Board that need to improve their service quality, OCULUS is a central monitoring and alerting system that tests all the services at EIDA nodes. Unlike the previous situation where the monitoring was very scattered and uneven, OCULUS will provide a global view of the services status and indicators for keeping track of service quality evolution.\n\n[EIDA home page](https://www.orfeus-eu.org/data/eida/)\n\n[Github project](https://github.com/EIDA/oculus-monitoring-backend/issues/new/choose)\n",
43 |         "mode": "markdown"
44 |       },
45 |       "pluginVersion": "12.1.1",
46 |       "title": "",
47 |       "type": "text"
48 |     },
49 |     {
50 |       "fieldConfig": {
51 |         "defaults": {},
52 |         "overrides": []
53 |       },
54 |       "gridPos": {
55 |         "h": 13,
56 |         "w": 24,
57 |         "x": 0,
58 |         "y": 6
59 |       },
60 |       "id": 2,
61 |       "options": {
62 |         "folderUID": "",
63 |         "includeVars": false,
64 |         "keepTime": false,
65 |         "maxItems": 10,
66 |         "query": "EIDA",
67 |         "showFolderNames": true,
68 |         "showHeadings": false,
69 |         "showRecentlyViewed": false,
70 |         "showSearch": true,
71 |         "showStarred": false,
72 |         "tags": [
73 |           "public"
74 |         ]
75 |       },
76 |       "pluginVersion": "12.1.1",
77 |       "title": "Dashboard list",
78 |       "type": "dashlist"
79 |     }
80 |   ],
81 |   "preload": false,
82 |   "schemaVersion": 41,
83 |   "tags": [],
84 |   "templating": {
85 |     "list": []
86 |   },
87 |   "time": {
88 |     "from": "now-6h",
89 |     "to": "now"
90 |   },
91 |   "timepicker": {},
92 |   "timezone": "browser",
93 |   "title": "welcome",
94 |   "uid": "feon4v7w47shse",
95 |   "version": 1
96 | }
--------------------------------------------------------------------------------
/scripts/csv_delete_columns.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import sys
3 | import os
4 | 
5 | # check arguments
6 | if len(sys.argv) < 2:
7 |     print("Usage: python csv_delete_columns.py <input_file.csv>")
8 |     sys.exit(1)
9 | 
10 | input_file = sys.argv[1]
11 | 
12 | # check if file exists
13 | if not os.path.exists(input_file):
14 |     print(f"Error: file {input_file} doesn't exist.")
15 |     sys.exit(1)
16 | 
17 | try:
18 |     # read csv file preserving NULL values as strings
19 |     df = pd.read_csv(input_file, quoting=3, escapechar='\\', keep_default_na=False, na_values=[])
20 | 
21 |     # check if file is empty
22 |     if df.empty:
23 |         print("Error: CSV file is empty.")
24 |         sys.exit(1)
25 | 
26 |     # check if file has columns
27 |     if df.shape[1] < 1:
28 |         print("Error: file has no columns.")
29 |         sys.exit(1)
30 | 
31 |     print(f"Original file: {df.shape[0]} rows, {df.shape[1]} columns")
32 |     print("\nColumns available:")
33 | 
34 |     # show all columns with index
35 |     for i, column in enumerate(df.columns):
36 |         print(f"{i}: {column}")
37 | 
38 |     # ask the user which column to delete
39 |     while True:
40 |         try:
41 |             choice = input(f"\nWhich column to delete? (0-{len(df.columns)-1}) or 'q' to quit: ")
42 | 
43 |             if choice.lower() == 'q':
44 |                 print("Canceled.")
45 |                 sys.exit(0)
46 | 
47 |             column_index = int(choice)
48 | 
49 |             if 0 <= column_index < len(df.columns):
50 |                 column_to_delete = df.columns[column_index]
51 |                 print(f"Delete the column: '{column_to_delete}'")
52 | 
(y/n): ") 54 | if confirm.lower() in ['y', 'yes', 'o', 'oui']: 55 | # Delete selected column 56 | df = df.drop(columns=[column_to_delete]) 57 | break 58 | else: 59 | print("Deletion canceled.") 60 | continue 61 | else: 62 | print(f"Error: enter a number between 0 and {len(df.columns)-1}") 63 | except ValueError: 64 | print("Error: enter a valid number or 'q' for quit") 65 | 66 | # save preserving NULL values and original format 67 | output_file = input_file.replace('.csv', '_clean.csv') 68 | df.to_csv(output_file, index=False, quoting=3, escapechar='\\') 69 | 70 | print(f"\nColumn '{column_to_delete}' deleted ! File save here : {output_file}") 71 | print(f"New file : {df.shape[0]} lignes, {df.shape[1]} columns") 72 | 73 | except pd.errors.EmptyDataError: 74 | print("Erroe: CSV file is empty or bad formated.") 75 | except KeyboardInterrupt: 76 | print("\Operation canceled.") 77 | except Exception as e: 78 | print(f"Error during processing: {str(e)}") -------------------------------------------------------------------------------- /scripts/values_to_macros.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env -S uv run 2 | # /// script 3 | # requires-python = ">=3.12" 4 | # dependencies = [ 5 | # "pyyaml", 6 | # ] 7 | # /// 8 | import yaml 9 | import json 10 | import sys 11 | import datetime 12 | 13 | class DateTimeEncoder(json.JSONEncoder): 14 | def default(self, obj): 15 | if isinstance(obj, datetime.datetime): 16 | return obj.isoformat() 17 | return super().default(obj) 18 | 19 | def flatten_yaml(data, parent_key='', sep='_'): 20 | """ 21 | flattens a nested yaml/dict structure into a flat dictionarry 22 | args: 23 | data: the data structure to flatten 24 | parent_key: the parent key for nested structures 25 | sep: separator to use between keys 26 | returns: 27 | dict; flattened dictrionary 28 | """ 29 | items = [] 30 | if isinstance(data, dict): 31 | for k, v in data.items(): 32 | new_key = f"{parent_key}{sep}{k}" if parent_key else k 33 | if isinstance(v, dict): 34 | items.extend(flatten_yaml(v, new_key, sep=sep).items()) 35 | elif isinstance(v, list): 36 | for i, item in enumerate(v): 37 | items.extend(flatten_yaml(item, f"{new_key}{sep}{i}", sep=sep).items()) 38 | else: 39 | items.append((new_key, v)) 40 | return dict(items) 41 | 42 | def generate_lld(yaml_file): 43 | """ 44 | generate ansible macros format output from a yaml file structure 45 | only processes specific sections; node, endpoint, routingFile, onlinecheck 46 | args: 47 | yaml_file (string): the path to the input yaml file 48 | """ 49 | with open(yaml_file, 'r') as yf: 50 | data = yaml.load(yf, Loader=yaml.BaseLoader) 51 | 52 | allowed_sections = ['node', 'endpoint', 'routingFile', 'onlineCheck'] 53 | filtered_data = {key: value for key, value in data.items() if key in allowed_sections} 54 | 55 | flattened_data = flatten_yaml(filtered_data) 56 | 57 | macros = [] 58 | for key, value in flattened_data.items(): 59 | macro_key = f"{{${key.upper()}}}" 60 | 61 | macro_entry = { 62 | "macro": macro_key, 63 | "value": str(value) 64 | } 65 | macros.append(macro_entry) 66 | 67 | # add CERT.WEBSITE.HOSTNAME for template web certificate 68 | endpoint_value = None 69 | for key, value in flattened_data.items(): 70 | if key.upper() == 'ENDPOINT': 71 | endpoint_value = str(value) 72 | break 73 | 74 | if endpoint_value: 75 | cert_macro = { 76 | "macro": "{$CERT.WEBSITE.HOSTNAME}", 77 | "value": endpoint_value 78 | } 79 | macros.append(cert_macro) 80 | 81 | for macro in macros: 82 | 
print(f'- macro: "{macro["macro"]}"') 84 | print(f' value: \'{macro["value"]}\'') 85 | 86 | 87 | if __name__ == "__main__": 88 | if len(sys.argv) != 2: 89 | print("usage: python values_to_macros.py <input_yaml_file>") 90 | sys.exit(1) 91 | 92 | input_yaml_file = sys.argv[1] 93 | 94 | generate_lld(input_yaml_file) -------------------------------------------------------------------------------- /ansible/playbooks/zbx_hosts.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Deploy EIDA Nodes in Zabbix 3 | hosts: localhost 4 | gather_facts: false 5 | vars_files: 6 | - "../config/config_{{ hostvars['localhost']['selected_environment'] | default('staging') }}.yaml" 7 | vars: 8 | ansible_network_os: community.zabbix.zabbix 9 | ansible_connection: httpapi 10 | ansible_httpapi_port: 8888 11 | ansible_httpapi_use_ssl: false 12 | ansible_httpapi_validate_certs: false 13 | ansible_zabbix_url_path: "" 14 | ansible_host: localhost 15 | host_files_directory: "../../eida_nodes" 16 | macros_output_directory: "../files/macros" 17 | 18 | tasks: 19 | - name: Import Zabbix Credentials 20 | ansible.builtin.import_tasks: ../config/config_zabbix.yaml 21 | 22 | - name: Create macros output directory 23 | ansible.builtin.file: 24 | path: "{{ macros_output_directory }}" 25 | state: directory 26 | mode: '0755' 27 | 28 | - name: Find all YAML files 29 | ansible.builtin.find: 30 | paths: "{{ host_files_directory }}" 31 | patterns: "*.yaml,*.yml" 32 | register: host_files 33 | 34 | - name: Display found files 35 | ansible.builtin.debug: 36 | msg: "Found {{ host_files.files | length }} files: {{ host_files.files | map(attribute='path') | map('basename') | list }}" 37 | 38 | - name: Execute values_to_macros.py and save output 39 | ansible.builtin.shell: 40 | cmd: "python ../../scripts/values_to_macros.py {{ item.path }} > {{ macros_output_directory }}/{{ item.path | basename }}" 41 | chdir: "{{ playbook_dir }}" 42 | loop: "{{ host_files.files }}" 43 | loop_control: 44 | label: "{{ item.path | basename }}" 45 | register: macros_generation 46 | 47 | - name: Create Zabbix host groups 48 | community.zabbix.zabbix_group: 49 | state: present 50 | host_groups: "{{ item.path | basename | regex_replace('\\.(yaml|yml)$', '') | upper }}" 51 | loop: "{{ host_files.files }}" 52 | loop_control: 53 | label: "{{ item.path | basename }}" 54 | 55 | - name: Create Zabbix hosts from file names with macros 56 | community.zabbix.zabbix_host: 57 | host_name: "{{ item.path | basename | regex_replace('\\.(yaml|yml)$', '') | upper }}" 58 | host_groups: 59 | - "EIDA nodes" 60 | - "{{ item.path | basename | regex_replace('\\.(yaml|yml)$', '') | upper }}" 61 | link_templates: 62 | - "Template performance checks" 63 | - "Template webservices" 64 | - "Template routing" 65 | - "Website certificate by Zabbix agent 2" 66 | status: "enabled" 67 | state: "present" 68 | interfaces: 69 | - type: 1 70 | useip: 1 71 | main: 1 72 | ip: "127.0.0.1" 73 | port: "10050" 74 | inventory_mode: "disabled" 75 | macros: "{{ lookup('file', macros_output_directory + '/' + item.path | basename) | from_yaml }}" 76 | loop: "{{ host_files.files }}" 77 | loop_control: 78 | label: "{{ item.path | basename }}" 79 | register: host_results 80 | 81 | - name: Display creation results 82 | ansible.builtin.debug: 83 | msg: >- 84 | Host '{{ item.item.path | basename | regex_replace('\.(yaml|yml)$', '') | upper }}': 85 | {{ 'Created/Updated' if item.changed else 'Already exists' }} 86 | loop: "{{ host_results.results }}" 87 | loop_control: 88 | 
label: "{{ item.item.path | basename }}" 89 | -------------------------------------------------------------------------------- /eida_nodes/ingv.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: INGV 3 | endpoint: webservices.ingv.it 4 | routingFile: eidaws/routing/1/routing.xml 5 | onlineCheck: 6 | net: BA 7 | sta: PZUN 8 | loc: "*" 9 | cha: HHZ 10 | start: 2018-06-02T00:01:00 11 | end: 2018-06-02T00:01:10 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: "IV" 18 | sta: "AR*,Z*" 19 | loc: "*" 20 | cha: "*" 21 | start: "2025-12-01T00:01:00" 22 | end: "2025-12-01T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: "IV" 26 | sta: "A*,B*" 27 | loc: "*" 28 | cha: "HH*" 29 | start: "2025-12-01T00:01:00" 30 | end: "2025-12-01T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams 33 | net: "IV,MN,NI,GU" 34 | sta: "*" 35 | loc: "*" 36 | cha: HHZ 37 | start: "2025-12-01T00:01:00" 38 | end: "2025-12-01T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: IV 42 | sta: AMUR 43 | loc: "*" 44 | cha: HH? 45 | start: "2012-06-08T00:00:00" 46 | end: "2012-06-23T00:00:00" 47 | - webservice: wfcatalog 48 | scenario: 9streams 49 | net: "IV" 50 | sta: "AR*,Z*" 51 | loc: "*" 52 | cha: "*" 53 | start: "2025-12-01T00:01:00" 54 | end: "2025-12-01T00:02:00" 55 | - webservice: wfcatalog 56 | scenario: 54streams 57 | net: "IV" 58 | sta: "A*,B*" 59 | loc: "*" 60 | cha: "HH*" 61 | start: "2025-12-01T00:01:00" 62 | end: "2025-12-01T00:02:00" 63 | - webservice: wfcatalog 64 | scenario: 320streams 65 | net: "IV,MN,NI,GU" 66 | sta: "*" 67 | loc: "*" 68 | cha: HHZ 69 | start: "2025-12-01T00:01:00" 70 | end: "2025-12-01T00:02:00" 71 | - webservice: wfcatalog 72 | scenario: 1stream15days 73 | net: IV 74 | sta: AMUR 75 | loc: "*" 76 | cha: HH? 77 | start: "2012-06-08T00:00:00" 78 | end: "2012-06-23T00:00:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: "IV" 82 | sta: "AR*,Z*" 83 | loc: "*" 84 | cha: "*" 85 | start: "2025-12-01T00:01:00" 86 | end: "2025-12-01T00:02:00" 87 | - webservice: availability 88 | scenario: 54streams 89 | net: "IV" 90 | sta: "A*,B*" 91 | loc: "*" 92 | cha: "HH*" 93 | start: "2025-12-01T00:01:00" 94 | end: "2025-12-01T00:02:00" 95 | - webservice: availability 96 | scenario: 320streams 97 | net: "IV,MN,NI,GU" 98 | sta: "*" 99 | loc: "*" 100 | cha: HHZ 101 | start: "2025-12-01T00:01:00" 102 | end: "2025-12-01T00:02:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: IV 106 | sta: AMUR 107 | loc: "*" 108 | cha: HH? 
109 | start: "2012-06-08T00:00:00" 110 | end: "2012-06-23T00:00:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: "IV" 114 | sta: "AR*,Z*" 115 | loc: "*" 116 | cha: "*" 117 | start: "2025-12-01T00:01:00" 118 | end: "2025-12-01T00:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: "IV" 122 | sta: "A*,B*" 123 | loc: "*" 124 | cha: "HH*" 125 | start: "2025-12-01T00:01:00" 126 | end: "2025-12-01T00:02:00" 127 | - webservice: station 128 | scenario: 320streams 129 | net: "IV,MN,NI,GU" 130 | sta: "*" 131 | loc: "*" 132 | cha: HHZ 133 | start: "2025-12-01T00:01:00" 134 | end: "2025-12-01T00:02:00" 135 | -------------------------------------------------------------------------------- /eida_nodes/icgc.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: ICGC 3 | endpoint: ws.icgc.cat 4 | routingFile: eidaws/routing/routing.xml 5 | onlineCheck: 6 | net: CA 7 | sta: CAVN 8 | loc: -- 9 | cha: HHZ 10 | start: 2024-12-22T00:00:00 11 | end: 2024-12-22T00:00:10 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: CA 18 | sta: CMAS,CORI,CBEU 19 | loc: "*" 20 | cha: HH? 21 | start: "2025-11-07T00:01:00" 22 | end: "2025-11-07T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: CA 26 | sta: "C*" 27 | loc: "*" 28 | cha: "*" 29 | start: "2025-11-07T00:01:00" 30 | end: "2025-11-07T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams #194 33 | net: "*" 34 | sta: "*" 35 | loc: "*" 36 | cha: "*" 37 | start: "2025-11-07T00:01:00" 38 | end: "2025-11-07T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: CA 42 | sta: CBEU 43 | loc: "*" 44 | cha: HHZ 45 | start: "2012-06-08T00:00:00" 46 | end: "2012-06-23T00:00:00" 47 | - webservice: wfcatalog 48 | scenario: 9streams 49 | net: CA 50 | sta: CMAS,CORI,CBEU 51 | loc: "*" 52 | cha: HH? 53 | start: "2025-11-07T00:01:00" 54 | end: "2025-11-07T00:02:00" 55 | - webservice: wfcatalog 56 | scenario: 54streams 57 | net: CA 58 | sta: "C*" 59 | loc: "*" 60 | cha: "*" 61 | start: "2025-11-07T00:01:00" 62 | end: "2025-11-07T00:02:00" 63 | - webservice: wfcatalog 64 | scenario: 320streams 65 | net: "*" 66 | sta: "*" 67 | loc: "*" 68 | cha: "*" 69 | start: "*" 70 | end: "*" 71 | - webservice: wfcatalog 72 | scenario: 1stream15days 73 | net: CA 74 | sta: CBEU 75 | loc: -- 76 | cha: HHZ 77 | start: "2012-06-08T00:00:00" 78 | end: "2012-06-23T00:00:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: CA 82 | sta: CMAS,CORI,CBEU 83 | loc: "*" 84 | cha: HH? 85 | start: "2025-11-07T00:01:00" 86 | end: "2025-11-07T00:02:00" 87 | - webservice: availability 88 | scenario: 54streams 89 | net: CA 90 | sta: "C*" 91 | loc: "*" 92 | cha: "*" 93 | start: "2025-11-07T00:01:00" 94 | end: "2025-11-07T00:02:00" 95 | - webservice: availability 96 | scenario: 320streams 97 | net: "*" 98 | sta: "*" 99 | loc: "*" 100 | cha: "*" 101 | start: "2020-08-05T00:01:00" 102 | end: "2020-08-05T23:59:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: CA 106 | sta: CBEU 107 | loc: "*" 108 | cha: HHZ 109 | start: "2012-06-08T00:00:00" 110 | end: "2012-06-23T00:00:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: CA 114 | sta: CMAS,CORI,CBEU 115 | loc: "*" 116 | cha: HH? 
117 | start: "2025-11-07T00:01:00" 118 | end: "2025-11-07T00:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: CA 122 | sta: "C*" 123 | loc: "*" 124 | cha: "*" 125 | start: "2025-11-07T00:01:00" 126 | end: "2025-11-07T00:02:00" 127 | - webservice: station 128 | scenario: 320streams 129 | net: CA,YK,EB 130 | sta: "*" 131 | loc: "*" 132 | cha: "*" 133 | start: "2011-12-22T00:00:00" 134 | end: "2019-05-20T00:10:00" 135 | - webservice: station 136 | scenario: 1stream15days 137 | net: CA 138 | sta: CBEU 139 | loc: "*" 140 | cha: HHZ 141 | start: "2012-06-08T00:00:00" 142 | end: "2012-06-23T00:00:00" 143 | -------------------------------------------------------------------------------- /eida_nodes/lmu.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: LMU 3 | endpoint: erde.geophysik.uni-muenchen.de 4 | routingFile: eidaws/routing/routing.xml 5 | onlineCheck: 6 | net: BW 7 | sta: FFB1 8 | loc: "*" 9 | cha: HHZ 10 | start: 2017-05-20T00:00:00 11 | end: 2017-05-20T00:00:05 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: BW 18 | sta: "FFB?" 19 | loc: "*" 20 | cha: "HH?" 21 | start: 2025-12-01T00:01:00 22 | end: 2025-12-01T00:02:00 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: BW 26 | sta: "B*,F*,M*" 27 | loc: "*" 28 | cha: "H*" 29 | start: 2025-12-01T00:01:00 30 | end: 2025-12-01T00:02:00 31 | - webservice: dataselect 32 | scenario: 320streams #270 33 | net: Z3 34 | sta: "*" 35 | loc: "*" 36 | cha: "*" 37 | start: 2021-12-01T00:01:00 38 | end: 2021-12-01T00:02:00 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: BW 42 | sta: "FFB1" 43 | loc: "*" 44 | cha: HHZ 45 | start: 2017-05-01T00:00:00 46 | end: 2017-05-16T00:00:00 47 | - webservice: availability 48 | scenario: 9streams 49 | net: BW 50 | sta: "FFB?" 51 | loc: "*" 52 | cha: "HH?" 53 | start: 2025-12-01T00:01:00 54 | end: 2025-12-01T00:02:00 55 | - webservice: availability 56 | scenario: 54streams 57 | net: BW 58 | sta: "B*,F*,M*" 59 | loc: "*" 60 | cha: "H*" 61 | start: 2025-12-01T00:01:00 62 | end: 2025-12-01T00:02:00 63 | - webservice: availability 64 | scenario: 320streams #270 65 | net: Z3 66 | sta: "*" 67 | loc: "*" 68 | cha: "*" 69 | start: 2021-12-01T00:01:00 70 | end: 2021-12-01T00:02:00 71 | - webservice: availability 72 | scenario: 1stream15days 73 | net: BW 74 | sta: "FFB1" 75 | loc: "*" 76 | cha: HHZ 77 | start: 2017-05-01T00:00:00 78 | end: 2017-05-16T00:00:00 79 | - webservice: wfcatalog 80 | scenario: 9streams 81 | net: BW 82 | sta: "FFB?" 83 | loc: "*" 84 | cha: "HH?" 85 | start: 2025-12-01T00:01:00 86 | end: 2025-12-01T00:02:00 87 | - webservice: wfcatalog 88 | scenario: 54streams 89 | net: BW 90 | sta: "B*,F*,M*" 91 | loc: "*" 92 | cha: "H*" 93 | start: 2025-12-01T00:01:00 94 | end: 2025-12-01T00:02:00 95 | - webservice: wfcatalog 96 | scenario: 320streams #270 97 | net: Z3 98 | sta: "*" 99 | loc: "*" 100 | cha: "*" 101 | start: 2021-12-01T00:01:00 102 | end: 2021-12-01T00:02:00 103 | - webservice: wfcatalog 104 | scenario: 1stream15days 105 | net: BW 106 | sta: "FFB1" 107 | loc: "*" 108 | cha: HHZ 109 | start: 2017-05-01T00:00:00 110 | end: 2017-05-16T00:00:00 111 | - webservice: station 112 | scenario: 9streams 113 | net: BW 114 | sta: "FFB?" 115 | loc: "*" 116 | cha: "HH?" 
117 | start: 2025-12-01T00:01:00 118 | end: 2025-12-01T00:02:00 119 | - webservice: station 120 | scenario: 54streams 121 | net: BW 122 | sta: "B*,F*,M*" 123 | loc: "*" 124 | cha: "H*" 125 | start: 2025-12-01T00:01:00 126 | end: 2025-12-01T00:02:00 127 | - webservice: station 128 | scenario: 320streams #270 129 | net: Z3 130 | sta: "*" 131 | loc: "*" 132 | cha: "*" 133 | start: 2021-12-01T00:01:00 134 | end: 2021-12-01T00:02:00 135 | - webservice: station 136 | scenario: 1stream15days 137 | net: BW 138 | sta: "FFB1" 139 | loc: "*" 140 | cha: HHZ 141 | start: 2017-05-01T00:00:00 142 | end: 2017-05-16T00:00:00 143 | -------------------------------------------------------------------------------- /eida_nodes/koeri.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: KOERI 3 | endpoint: eida.koeri.boun.edu.tr 4 | routingFile: routing/routing.xml 5 | onlineCheck: 6 | net: KO 7 | sta: KMAH 8 | loc: -- 9 | cha: HHZ 10 | start: 2025-04-01T00:00:00 11 | end: 2025-04-01T00:00:05 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: "KO" 18 | sta: "YA*" 19 | loc: "*" 20 | cha: "*" 21 | start: "2025-12-01T00:01:00" 22 | end: "2025-12-01T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: "KO" 26 | sta: "A*" 27 | loc: "*" 28 | cha: "*" 29 | start: "2025-12-01T00:01:00" 30 | end: "2025-12-01T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams 33 | net: 6G,KO 34 | sta: "*" 35 | loc: "*" 36 | cha: "*" 37 | start: "2025-12-01T00:01:00" 38 | end: "2025-12-01T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: KO 42 | sta: GULA 43 | loc: "*" 44 | cha: HHZ 45 | start: "2025-06-04T00:00:00" 46 | end: "2025-06-19T00:00:00" 47 | - webservice: wfcatalog 48 | scenario: 9streams 49 | net: "KO" 50 | sta: "YA*" 51 | loc: "*" 52 | cha: "*" 53 | start: "2025-12-01T00:01:00" 54 | end: "2025-12-01T00:02:00" 55 | - webservice: wfcatalog 56 | scenario: 54streams 57 | net: "KO" 58 | sta: "A*" 59 | loc: "*" 60 | cha: "*" 61 | start: "2025-12-01T00:01:00" 62 | end: "2025-12-01T00:02:00" 63 | - webservice: wfcatalog 64 | scenario: 320streams 65 | net: 6G,KO 66 | sta: "*" 67 | loc: "*" 68 | cha: "*" 69 | start: "2025-12-01T00:01:00" 70 | end: "2025-12-01T00:02:00" 71 | - webservice: wfcatalog 72 | scenario: 1stream15days 73 | net: KO 74 | sta: GULA 75 | loc: "*" 76 | cha: HHZ 77 | start: "2025-06-04T00:00:00" 78 | end: "2025-06-19T00:00:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: "KO" 82 | sta: "YA*" 83 | loc: "*" 84 | cha: "*" 85 | start: "2025-12-01T00:01:00" 86 | end: "2025-12-01T00:02:00" 87 | - webservice: availability 88 | scenario: 54streams 89 | net: "KO" 90 | sta: "A*" 91 | loc: "*" 92 | cha: "*" 93 | start: "2025-12-01T00:01:00" 94 | end: "2025-12-01T00:02:00" 95 | - webservice: availability 96 | scenario: 320streams 97 | net: 6G,KO 98 | sta: "*" 99 | loc: "*" 100 | cha: "*" 101 | start: "2025-12-01T00:01:00" 102 | end: "2025-12-01T00:02:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: KO 106 | sta: GULA 107 | loc: "*" 108 | cha: HHZ 109 | start: "2025-06-04T00:00:00" 110 | end: "2025-06-19T00:00:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: "KO" 114 | sta: "YA*" 115 | loc: "*" 116 | cha: "*" 117 | start: "2025-12-01T00:01:00" 118 | end: "2025-12-01T00:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: "KO" 122 | sta: "A*" 123 
| loc: "*" 124 | cha: "*" 125 | start: "2025-12-01T00:01:00" 126 | end: "2025-12-01T00:02:00" 127 | - webservice: station 128 | scenario: 320streams 129 | net: 6G,KO 130 | sta: "*" 131 | loc: "*" 132 | cha: "*" 133 | start: "2025-12-01T00:01:00" 134 | end: "2025-12-01T00:02:00" 135 | - webservice: station 136 | scenario: 1stream15days 137 | net: KO 138 | sta: GULA 139 | loc: "*" 140 | cha: HHZ 141 | start: "2025-06-04T00:00:00" 142 | end: "2025-06-19T00:00:00" 143 | -------------------------------------------------------------------------------- /eida_nodes/bgr.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: BGR 3 | endpoint: eida.bgr.de 4 | routingFile: eidaws/routing/routing.xml 5 | onlineCheck: 6 | net: GR 7 | sta: ASSE 8 | loc: "*" 9 | cha: HHZ 10 | start: 2017-07-01T00:01:00 11 | end: 2017-07-01T00:01:10 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: GR 18 | sta: ASSE,BFO,MOX 19 | loc: "*" 20 | cha: HH* 21 | start: "2017-07-01T00:01:00" 22 | end: "2017-07-01T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: SX,TH 26 | sta: "*" 27 | loc: "*" 28 | cha: HHZ 29 | start: "2017-01-01T00:01:00" 30 | end: "2017-01-01T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams 33 | net: TH,GR 34 | sta: "*" 35 | loc: "*" 36 | cha: H* 37 | start: "2025-06-01T00:01:00" 38 | end: "2025-06-01T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: GR 42 | sta: BFO 43 | loc: "*" 44 | cha: HHZ 45 | start: "2025-06-01T00:00:00" 46 | end: "2025-06-16T00:00:00" 47 | - webservice: wfcatalog 48 | scenario: 9streams 49 | net: GR 50 | sta: ASSE,BFO,MOX 51 | loc: "*" 52 | cha: HH* 53 | start: "2017-07-01T00:01:00" 54 | end: "2017-07-01T00:02:00" 55 | - webservice: wfcatalog 56 | scenario: 54streams 57 | net: SX,TH 58 | sta: "*" 59 | loc: "*" 60 | cha: HHZ 61 | start: "2017-01-01T00:01:00" 62 | end: "2017-01-01T00:02:00" 63 | - webservice: wfcatalog 64 | scenario: 320streams 65 | net: TH,GR 66 | sta: "*" 67 | loc: "*" 68 | cha: H* 69 | start: "2025-06-01T00:01:00" 70 | end: "2025-06-01T00:02:00" 71 | - webservice: wfcatalog 72 | scenario: 1stream15days 73 | net: GR 74 | sta: BFO 75 | loc: "*" 76 | cha: HHZ 77 | start: "2025-06-01T00:00:00" 78 | end: "2025-06-16T00:00:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: GR 82 | sta: ASSE,BFO,MOX 83 | loc: "*" 84 | cha: HH* 85 | start: "2017-07-01T00:01:00" 86 | end: "2017-07-01T23:59:00" 87 | - webservice: availability 88 | scenario: 54streams 89 | net: SX,TH 90 | sta: "*" 91 | loc: "*" 92 | cha: HHZ 93 | start: "2017-01-01T00:01:00" 94 | end: "2017-01-01T00:02:00" 95 | - webservice: availability 96 | scenario: 320streams 97 | net: TH,GR 98 | sta: "*" 99 | loc: "*" 100 | cha: H* 101 | start: "2025-06-01T00:01:00" 102 | end: "2025-06-01T00:02:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: GR 106 | sta: BFO 107 | loc: "*" 108 | cha: HHZ 109 | start: "2025-06-01T00:00:00" 110 | end: "2025-06-16T00:00:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: GR 114 | sta: ASSE,BFO,MOX 115 | loc: "*" 116 | cha: HH* 117 | start: "2017-07-01T00:01:00" 118 | end: "2017-07-01T00:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: SX,TH 122 | sta: "*" 123 | loc: "*" 124 | cha: HHZ 125 | start: "2017-01-01T00:01:00" 126 | end: "2017-01-01T00:02:00" 127 | - webservice: station 128 | scenario: 
320streams 129 | net: TH,GR 130 | sta: "*" 131 | loc: "*" 132 | cha: H* 133 | start: "2025-06-01T00:01:00" 134 | end: "2025-06-01T00:02:00" 135 | - webservice: station 136 | scenario: 1stream15days 137 | net: GR 138 | sta: BFO 139 | loc: "*" 140 | cha: HHZ 141 | start: "2025-06-01T00:00:00" 142 | end: "2025-06-16T00:00:00" 143 | -------------------------------------------------------------------------------- /eida_nodes/bgs.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: BGS 3 | endpoint: eida.bgs.ac.uk 4 | routingFile: routing.xml 5 | onlineCheck: 6 | net: GB 7 | sta: TARL 8 | loc: "00" 9 | cha: HHZ 10 | start: 2024-11-30T00:01:00 11 | end: 2024-11-30T00:01:10 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: GB 18 | sta: A* 19 | loc: "*" 20 | cha: "H*" 21 | start: "2025-03-12T00:01:00" 22 | end: "2025-03-12T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: GB 26 | sta: "E*,H*,W*" 27 | loc: "*" 28 | cha: "HH*" 29 | start: "2025-03-12T00:01:00" 30 | end: "2025-03-12T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams # 255 33 | net: "*" 34 | sta: "*" 35 | loc: "*" 36 | cha: "*" 37 | start: "2025-03-12T00:01:00" 38 | end: "2025-03-12T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: GB 42 | sta: LAWE 43 | loc: "*" 44 | cha: HHZ 45 | start: "2025-03-01T00:01:00" 46 | end: "2025-03-16T00:01:00" 47 | - webservice: wfcatalog 48 | scenario: 9streams 49 | net: GB 50 | sta: A* 51 | loc: "*" 52 | cha: "H*" 53 | start: "2025-03-12T00:01:00" 54 | end: "2025-03-12T00:02:00" 55 | - webservice: wfcatalog 56 | scenario: 54streams 57 | net: GB 58 | sta: "E*,H*,W*" 59 | loc: "*" 60 | cha: "HH*" 61 | start: "2025-03-12T00:01:00" 62 | end: "2025-03-12T00:02:00" 63 | - webservice: wfcatalog 64 | scenario: 320streams # 255 65 | net: "*" 66 | sta: "*" 67 | loc: "*" 68 | cha: "*" 69 | start: "2025-03-12T00:01:00" 70 | end: "2025-03-12T00:02:00" 71 | - webservice: wfcatalog 72 | scenario: 1stream15days 73 | net: GB 74 | sta: LAWE 75 | loc: "*" 76 | cha: HHZ 77 | start: "2025-03-01T00:01:00" 78 | end: "2025-03-16T00:01:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: GB 82 | sta: A* 83 | loc: "*" 84 | cha: "H*" 85 | start: "2025-03-12T00:01:00" 86 | end: "2025-03-12T00:02:00" 87 | - webservice: availability 88 | scenario: 54streams 89 | net: GB 90 | sta: "E*,H*,W*" 91 | loc: "*" 92 | cha: "HH*" 93 | start: "2025-03-12T00:01:00" 94 | end: "2025-03-12T00:02:00" 95 | - webservice: availability 96 | scenario: 320streams # 255 97 | net: "*" 98 | sta: "*" 99 | loc: "*" 100 | cha: "*" 101 | start: "2025-03-12T00:01:00" 102 | end: "2025-03-12T00:02:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: GB 106 | sta: LAWE 107 | loc: "*" 108 | cha: HHZ 109 | start: "2025-03-01T00:01:00" 110 | end: "2025-03-16T00:01:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: GB 114 | sta: A* 115 | loc: "*" 116 | cha: "H*" 117 | start: "2025-03-12T00:01:00" 118 | end: "2025-03-12T00:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: GB 122 | sta: "E*,H*,W*" 123 | loc: "*" 124 | cha: "HH*" 125 | start: "2025-03-12T00:01:00" 126 | end: "2025-03-12T00:02:00" 127 | - webservice: station 128 | scenario: 320streams # 255 129 | net: "*" 130 | sta: "*" 131 | loc: "*" 132 | cha: "*" 133 | start: "2025-03-12T00:01:00" 134 | end: "2025-03-12T00:02:00" 135 | 
- webservice: station 136 | scenario: 1stream15days 137 | net: GB 138 | sta: LAWE 139 | loc: "*" 140 | cha: HHZ 141 | start: "2025-03-01T00:01:00" 142 | end: "2025-03-16T00:01:00" 143 | -------------------------------------------------------------------------------- /eida_nodes/noa.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: NOA 3 | endpoint: eida.gein.noa.gr 4 | routingFile: routing/routing.xml 5 | onlineCheck: 6 | net: HL 7 | sta: ZKR 8 | loc: -- 9 | cha: HHZ 10 | start: "2025-08-16T00:00:00" 11 | end: "2025-08-16T00:00:05" 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: HL 18 | sta: "Y*,Z*" 19 | loc: "*" 20 | cha: "*" 21 | start: "2025-12-01T00:01:00" 22 | end: "2025-12-01T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: HL 26 | sta: "*" 27 | loc: "*" 28 | cha: HNZ 29 | start: "2025-12-01T00:01:00" 30 | end: "2025-12-01T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams 33 | net: HL,HI,CQ 34 | sta: "*" 35 | loc: "*" 36 | cha: "H*E,H*Z" 37 | start: "2025-12-01T00:01:00" 38 | end: "2025-12-01T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: HL 42 | sta: IDI 43 | loc: "*" 44 | cha: HHZ 45 | start: "2012-06-04T00:00:00" 46 | end: "2012-06-19T00:00:00" 47 | - webservice: availability 48 | scenario: 9streams 49 | net: HL 50 | sta: "Y*,Z*" 51 | loc: "*" 52 | cha: "*" 53 | start: "2025-12-01T00:01:00" 54 | end: "2025-12-01T00:02:00" 55 | - webservice: availability 56 | scenario: 54streams 57 | net: HL 58 | sta: "*" 59 | loc: "*" 60 | cha: HNZ 61 | start: "2025-12-01T00:01:00" 62 | end: "2025-12-01T00:02:00" 63 | - webservice: availability 64 | scenario: 320streams 65 | net: HL,HI,CQ 66 | sta: "*" 67 | loc: "*" 68 | cha: "H*E,H*Z" 69 | start: "2025-12-01T00:01:00" 70 | end: "2025-12-01T00:02:00" 71 | - webservice: availability 72 | scenario: 1stream15days 73 | net: HL 74 | sta: IDI 75 | loc: "*" 76 | cha: HHZ 77 | start: "2012-06-04T00:00:00" 78 | end: "2012-06-19T00:00:00" 79 | - webservice: wfcatalog 80 | scenario: 9streams 81 | net: HL 82 | sta: "Y*,Z*" 83 | loc: "*" 84 | cha: "*" 85 | start: "2025-12-01T00:01:00" 86 | end: "2025-12-01T00:02:00" 87 | - webservice: wfcatalog 88 | scenario: 54streams 89 | net: HL 90 | sta: "*" 91 | loc: "*" 92 | cha: HNZ 93 | start: "2025-12-01T00:01:00" 94 | end: "2025-12-01T00:02:00" 95 | - webservice: wfcatalog 96 | scenario: 320streams 97 | net: HL,HI,CQ 98 | sta: "*" 99 | loc: "*" 100 | cha: "H*E,H*Z" 101 | start: "2025-12-01T00:01:00" 102 | end: "2025-12-01T00:02:00" 103 | - webservice: wfcatalog 104 | scenario: 1stream15days 105 | net: HL 106 | sta: IDI 107 | loc: "*" 108 | cha: HHZ 109 | start: "2012-06-04T00:00:00" 110 | end: "2012-06-19T00:00:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: HL 114 | sta: "Y*,Z*" 115 | loc: "*" 116 | cha: "*" 117 | start: "2025-12-01T00:01:00" 118 | end: "2025-12-01T00:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: HL 122 | sta: "*" 123 | loc: "*" 124 | cha: HNZ 125 | start: "2025-12-01T00:01:00" 126 | end: "2025-12-01T00:02:00" 127 | - webservice: station 128 | scenario: 320streams 129 | net: HL,HI,CQ 130 | sta: "*" 131 | loc: "*" 132 | cha: "H*E,H*Z" 133 | start: "2025-12-01T00:01:00" 134 | end: "2025-12-01T00:02:00" 135 | - webservice: station 136 | scenario: 1stream15days 137 | net: HL 138 | sta: IDI 139 | loc: "*" 140 | cha: HHZ 141 | start: 
"2012-06-04T00:00:00" 142 | end: "2012-06-19T00:00:00" 143 | -------------------------------------------------------------------------------- /eida_nodes/eposfr.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: EPOSFR 3 | endpoint: ws.resif.fr 4 | routingFile: routing/eida/routing.xml 5 | onlineCheck: 6 | net: FR 7 | sta: CIEL 8 | loc: "00" 9 | cha: HHZ 10 | start: 2025-02-01T00:00:00 11 | end: 2025-02-01T00:00:05 12 | requiremets: 13 | network: FR 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: FR 18 | sta: ATE,BANN,BLAF 19 | loc: "*" 20 | cha: HH? 21 | start: "2025-12-01T00:01:00" 22 | end: "2025-12-01T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: FR 26 | sta: "B*,D*" 27 | loc: "*" 28 | cha: "H*" 29 | start: "2025-12-01T00:01:00" 30 | end: "2025-12-01T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams 33 | net: FR,RA 34 | sta: "*" 35 | loc: "*" 36 | cha: "H?Z" 37 | start: "2025-12-01T00:01:00" 38 | end: "2025-12-01T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: FR 42 | sta: CIEL 43 | loc: "00" 44 | cha: HHZ 45 | start: "2025-11-01T00:00:00" 46 | end: "2025-11-16T00:00:00" 47 | - webservice: wfcatalog 48 | scenario: 9streams 49 | net: FR 50 | sta: ATE,BANN,BLAF 51 | loc: "*" 52 | cha: HH? 53 | start: "2025-12-01T00:01:00" 54 | end: "2025-12-01T00:02:00" 55 | - webservice: wfcatalog 56 | scenario: 54streams 57 | net: FR 58 | sta: "B*,D*" 59 | loc: "*" 60 | cha: "HH*" 61 | start: "2025-12-01T00:01:00" 62 | end: "2025-12-01T00:02:00" 63 | - webservice: wfcatalog 64 | scenario: 320streams 65 | net: FR,RA 66 | sta: "*" 67 | loc: "*" 68 | cha: "H?Z" 69 | start: "2025-12-01T00:01:00" 70 | end: "2025-12-01T00:02:00" 71 | - webservice: wfcatalog 72 | scenario: 1stream15days 73 | net: FR 74 | sta: CIEL 75 | loc: "00" 76 | cha: HHZ 77 | start: "2025-11-01T00:00:00" 78 | end: "2025-11-16T00:00:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: FR 82 | sta: ATE,BANN,BLAF 83 | loc: "*" 84 | cha: HH? 85 | start: "2025-12-01T00:01:00" 86 | end: "2025-12-01T00:02:00" 87 | - webservice: availability 88 | scenario: 54streams 89 | net: FR 90 | sta: "B*,D*" 91 | loc: "*" 92 | cha: "HH*" 93 | start: "2025-12-01T00:01:00" 94 | end: "2025-12-01T00:02:00" 95 | - webservice: availability 96 | scenario: 320streams 97 | net: FR,RA 98 | sta: "*" 99 | loc: "*" 100 | cha: "H?Z" 101 | start: "2025-12-01T00:01:00" 102 | end: "2025-12-01T00:02:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: FR 106 | sta: CIEL 107 | loc: "00" 108 | cha: HHZ 109 | start: "2025-11-01T00:00:00" 110 | end: "2025-11-16T00:00:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: FR 114 | sta: ATE,BANN,BLAF 115 | loc: "*" 116 | cha: HH? 
117 | start: "2025-12-01T00:01:00" 118 | end: "2025-12-01T00:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: FR 122 | sta: "B*,D*" 123 | loc: "*" 124 | cha: "HH*" 125 | start: "2025-12-01T00:01:00" 126 | end: "2025-12-01T00:02:00" 127 | - webservice: station 128 | scenario: 320streams 129 | net: FR,RA 130 | sta: "*" 131 | loc: "*" 132 | cha: "H?Z" 133 | start: "2025-12-01T00:01:00" 134 | end: "2025-12-01T00:02:00" 135 | - webservice: station 136 | scenario: 1stream15days 137 | net: FR 138 | sta: CIEL 139 | loc: "00" 140 | cha: HHZ 141 | start: "2025-11-01T00:00:00" 142 | end: "2025-11-16T00:00:00" 143 | -------------------------------------------------------------------------------- /eida_nodes/odc.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: ODC 3 | endpoint: orfeus-eu.org 4 | routingFile: "eidaws/routing/1/query?service=dataselect&net=NL" 5 | onlineCheck: 6 | net: NL 7 | sta: BHAR 8 | loc: -- 9 | cha: HGZ 10 | start: 2017-06-04T00:00:00 11 | end: 2017-06-04T00:00:05 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: NL 18 | sta: BHAR,BING,BSTD 19 | loc: "*" 20 | cha: "*" 21 | start: "2017-06-04T00:01:00" 22 | end: "2017-06-04T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: NL 26 | sta: "Z*,N*" 27 | loc: "*" 28 | cha: HNZ 29 | start: "2025-12-01T00:01:00" 30 | end: "2025-12-01T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams #294 33 | net: BE,CR 34 | sta: "*" 35 | loc: "*" 36 | cha: "*" 37 | start: "2025-12-01T00:01:00" 38 | end: "2025-12-01T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: "NL" 42 | sta: "BHAR" 43 | loc: "*" 44 | cha: "HNZ" 45 | start: "2025-10-01" 46 | end: "2025-10-16" 47 | 48 | - webservice: wfcatalog 49 | scenario: 9streams 50 | net: NL 51 | sta: BHAR,BING,BSTD 52 | loc: "*" 53 | cha: "*" 54 | start: "2017-06-04T00:01:00" 55 | end: "2017-06-04T00:02:00" 56 | - webservice: wfcatalog 57 | scenario: 54streams 58 | net: NL 59 | sta: "Z*,N*" 60 | loc: "*" 61 | cha: HNZ 62 | start: "2025-12-01T00:01:00" 63 | end: "2025-12-01T00:02:00" 64 | - webservice: wfcatalog 65 | scenario: 320streams #294 66 | net: BE,CR 67 | sta: "*" 68 | loc: "*" 69 | cha: "*" 70 | start: "2025-12-01T00:01:00" 71 | end: "2025-12-01T00:02:00" 72 | - webservice: wfcatalog 73 | scenario: 1stream15days 74 | net: "NL" 75 | sta: "BHAR" 76 | loc: "*" 77 | cha: "HNZ" 78 | start: "2025-10-01" 79 | end: "2025-10-16" 80 | 81 | - webservice: station 82 | scenario: 9streams 83 | net: NL 84 | sta: BHAR,BING,BSTD 85 | loc: "*" 86 | cha: "*" 87 | start: "2017-06-04T00:01:00" 88 | end: "2017-06-04T00:02:00" 89 | - webservice: station 90 | scenario: 54streams 91 | net: NL 92 | sta: "Z*,N*" 93 | loc: "*" 94 | cha: HNZ 95 | start: "2025-12-01T00:01:00" 96 | end: "2025-12-01T00:02:00" 97 | - webservice: station 98 | scenario: 320streams #294 99 | net: BE,CR 100 | sta: "*" 101 | loc: "*" 102 | cha: "*" 103 | start: "2025-12-01T00:01:00" 104 | end: "2025-12-01T00:02:00" 105 | - webservice: station 106 | scenario: 1stream15days 107 | net: "NL" 108 | sta: "BHAR" 109 | loc: "*" 110 | cha: "HNZ" 111 | start: "2025-10-01" 112 | end: "2025-10-16" 113 | 114 | - webservice: availability 115 | scenario: 9streams 116 | net: NL 117 | sta: BHAR,BING,BSTD 118 | loc: "*" 119 | cha: "*" 120 | start: "2017-06-04T00:01:00" 121 | end: "2017-06-04T00:02:00" 122 | - webservice: availability 123 | 
scenario: 54streams 124 | net: NL 125 | sta: "Z*,N*" 126 | loc: "*" 127 | cha: HNZ 128 | start: "2025-12-01T00:01:00" 129 | end: "2025-12-01T00:02:00" 130 | - webservice: availability 131 | scenario: 320streams #294 132 | net: BE,CR 133 | sta: "*" 134 | loc: "*" 135 | cha: "*" 136 | start: "2025-12-01T00:01:00" 137 | end: "2025-12-01T00:02:00" 138 | - webservice: availability 139 | scenario: 1stream15days 140 | net: "NL" 141 | sta: "BHAR" 142 | loc: "*" 143 | cha: "HNZ" 144 | start: "2025-10-01" 145 | end: "2025-10-16" 146 | -------------------------------------------------------------------------------- /eida_nodes/eth.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: ETH 3 | endpoint: eida.ethz.ch 4 | routingFile: eidaws/routing/routing.xml 5 | onlineCheck: 6 | net: CH 7 | sta: SVIK 8 | loc: BT 9 | cha: HGZ 10 | start: 2025-05-14T00:01:00 11 | end: 2025-05-14T00:01:10 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: CH 18 | sta: MUO,LLS,DAVOX 19 | loc: "*" 20 | cha: HH? 21 | start: "2025-06-08T00:01:00" 22 | end: "2025-06-08T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams 25 | net: CH 26 | sta: "B*,CHASS" 27 | loc: "*" 28 | cha: "H*" 29 | start: "2025-06-08T00:01:00" 30 | end: "2025-06-08T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams 33 | net: CH 34 | sta: "S*,C*,D*" 35 | loc: "*" 36 | cha: "H*Z,H*N" 37 | start: "2025-06-08T00:01:00" 38 | end: "2025-06-08T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: CH 42 | sta: LLS 43 | loc: "*" 44 | cha: HHZ 45 | start: "2025-06-08T00:00:00" 46 | end: "2025-06-23T00:00:00" 47 | - webservice: wfcatalog 48 | scenario: 9streams 49 | net: CH 50 | sta: MUO,LLS,DAVOX 51 | loc: "*" 52 | cha: HH? 53 | start: "2025-06-08T00:01:00" 54 | end: "2025-06-08T00:02:00" 55 | - webservice: wfcatalog 56 | scenario: 54streams 57 | net: CH 58 | sta: "B*,CHASS" 59 | loc: "*" 60 | cha: "H*" 61 | start: "2025-06-08T00:01:00" 62 | end: "2025-06-08T00:02:00" 63 | - webservice: wfcatalog 64 | scenario: 320streams 65 | net: CH 66 | sta: "S*,C*,D*" 67 | loc: "*" 68 | cha: "H*Z,H*N" 69 | start: "2025-06-08T00:01:00" 70 | end: "2025-06-08T00:02:00" 71 | - webservice: wfcatalog 72 | scenario: 1stream15days 73 | net: CH 74 | sta: LLS 75 | loc: "*" 76 | cha: HHZ 77 | start: "2025-06-08T00:00:00" 78 | end: "2025-06-23T00:00:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: CH 82 | sta: MUO,LLS,DAVOX 83 | loc: "*" 84 | cha: HH? 85 | start: "2025-06-08T00:01:00" 86 | end: "2025-06-08T00:02:00" 87 | - webservice: availability 88 | scenario: 54streams 89 | net: CH 90 | sta: "B*,CHASS" 91 | loc: "*" 92 | cha: "H*" 93 | start: "2025-06-08T00:01:00" 94 | end: "2025-06-08T00:02:00" 95 | - webservice: availability 96 | scenario: 320streams 97 | net: CH 98 | sta: "S*,C*,D*" 99 | loc: "*" 100 | cha: "H*Z,H*N" 101 | start: "2025-06-08T00:01:00" 102 | end: "2025-06-08T00:02:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: CH 106 | sta: LLS 107 | loc: "*" 108 | cha: HHZ 109 | start: "2025-06-08T00:00:00" 110 | end: "2025-06-23T00:00:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: CH 114 | sta: MUO,LLS,DAVOX 115 | loc: "*" 116 | cha: HH? 
117 | start: "2025-06-08T00:01:00" 118 | end: "2025-06-08T00:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: CH 122 | sta: "B*,CHASS" 123 | loc: "*" 124 | cha: "H*" 125 | start: "2025-06-08T00:01:00" 126 | end: "2025-06-08T00:02:00" 127 | - webservice: station 128 | scenario: 320streams 129 | net: CH 130 | sta: "S*,C*,D*" 131 | loc: "*" 132 | cha: "H*Z,H*N" 133 | start: "2025-06-08T00:01:00" 134 | end: "2025-06-08T00:02:00" 135 | - webservice: station 136 | scenario: 1stream15days 137 | net: CH 138 | sta: LLS 139 | loc: "*" 140 | cha: HHZ 141 | start: "2025-06-08T00:00:00" 142 | end: "2025-06-23T00:00:00" 143 | -------------------------------------------------------------------------------- /eida_nodes/uib-norsar.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: UIB-NORSAR 3 | endpoint: eida.geo.uib.no 4 | routingFile: eidaws/routing.xml 5 | onlineCheck: 6 | net: NS 7 | sta: BER 8 | loc: "00" 9 | cha: HHZ 10 | start: 2019-01-15T00:01:50 11 | end: 2019-01-15T00:02:00 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: NS 18 | sta: B* 19 | loc: "*" 20 | cha: "*" 21 | start: "2025-12-01T00:01:00" 22 | end: "2025-12-01T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams # 44 streams 25 | net: NS 26 | sta: "*" 27 | loc: "*" 28 | cha: "H*Z" 29 | start: "2025-12-01T00:01:00" 30 | end: "2025-12-01T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams 33 | net: "*" 34 | sta: "*" 35 | loc: "*" 36 | cha: "H*" 37 | start: "2025-12-01T00:01:00" 38 | end: "2025-12-01T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: NS 42 | sta: FAUS 43 | loc: "*" 44 | cha: HHZ 45 | start: "2018-06-07T00:01:00" 46 | end: "2018-06-22T00:01:00" 47 | 48 | - webservice: availability 49 | scenario: 9streams 50 | net: NS 51 | sta: B* 52 | loc: "*" 53 | cha: "*" 54 | start: "2025-12-01T00:01:00" 55 | end: "2025-12-01T00:02:00" 56 | - webservice: availability 57 | scenario: 54streams # 44 streams 58 | net: NS 59 | sta: "*" 60 | loc: "*" 61 | cha: "H*Z" 62 | start: "2025-12-01T00:01:00" 63 | end: "2025-12-01T00:02:00" 64 | - webservice: availability 65 | scenario: 320streams 66 | net: "*" 67 | sta: "*" 68 | loc: "*" 69 | cha: "H*" 70 | start: "2025-12-01T00:01:00" 71 | end: "2025-12-01T00:02:00" 72 | - webservice: availability 73 | scenario: 1stream15days 74 | net: NS 75 | sta: FAUS 76 | loc: "*" 77 | cha: HHZ 78 | start: "2018-06-07T00:01:00" 79 | end: "2018-06-22T00:01:00" 80 | 81 | - webservice: wfcatalog 82 | scenario: 9streams 83 | net: NS 84 | sta: B* 85 | loc: "*" 86 | cha: "*" 87 | start: "2025-12-01T00:01:00" 88 | end: "2025-12-01T00:02:00" 89 | - webservice: wfcatalog 90 | scenario: 54streams # 44 streams 91 | net: NS 92 | sta: "*" 93 | loc: "*" 94 | cha: "H*Z" 95 | start: "2025-12-01T00:01:00" 96 | end: "2025-12-01T00:02:00" 97 | - webservice: wfcatalog 98 | scenario: 320streams # 196 streams 99 | net: "*" 100 | sta: "*" 101 | loc: "*" 102 | cha: "H*" 103 | start: "2025-12-01T00:01:00" 104 | end: "2025-12-01T00:02:00" 105 | - webservice: wfcatalog 106 | scenario: 1stream15days 107 | net: NS 108 | sta: FAUS 109 | loc: "*" 110 | cha: HHZ 111 | start: "2018-06-07T00:01:00" 112 | end: "2018-06-22T00:01:00" 113 | 114 | - webservice: station 115 | scenario: 9streams 116 | net: NS 117 | sta: B* 118 | loc: "*" 119 | cha: "*" 120 | start: "2025-12-01T00:01:00" 121 | end: "2025-12-01T00:02:00" 122 | - webservice: 
station 123 | scenario: 54streams # 44 streams 124 | net: NS 125 | sta: "*" 126 | loc: "*" 127 | cha: "H*Z" 128 | start: "2025-12-01T00:01:00" 129 | end: "2025-12-01T00:02:00" 130 | - webservice: station 131 | scenario: 320streams # 196 streams 132 | net: "*" 133 | sta: "*" 134 | loc: "*" 135 | cha: "H*" 136 | start: "2025-12-01T00:01:00" 137 | end: "2025-12-01T00:02:00" 138 | - webservice: station 139 | scenario: 1stream15days 140 | net: NS 141 | sta: FAUS 142 | loc: "*" 143 | cha: HHZ 144 | start: "2018-06-07T00:01:00" 145 | end: "2018-06-22T00:01:00" 146 | -------------------------------------------------------------------------------- /eida_nodes/geofon.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: GEOFON 3 | endpoint: geofon.gfz.de 4 | routingFile: "eidaws/routing/1/query?service=dataselect&network=GE" 5 | onlineCheck: 6 | net: GE 7 | sta: KBS 8 | loc: "00" 9 | cha: HHZ 10 | start: 2025-01-23T00:01:00 11 | end: 2025-01-23T00:01:10 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: GE 18 | sta: ZKR 19 | loc: "*" 20 | cha: "B*,H*,L*" 21 | start: "2025-04-04T00:01:00" 22 | end: "2025-04-04T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams # Actually 47 25 | net: GE 26 | sta: "*" 27 | loc: "*" 28 | cha: SHZ 29 | start: "2025-04-05T00:01:00" 30 | end: "2025-04-05T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams # Actually 324 33 | net: GE 34 | sta: "*" 35 | loc: "*" 36 | cha: "B*,*1,*2" 37 | start: "2025-06-03T00:01:00" 38 | end: "2025-06-03T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: CZ 42 | sta: VRAC 43 | loc: -- 44 | cha: HHZ 45 | start: "2025-06-08T00:00:00" 46 | end: "2025-06-23T00:00:00" 47 | - webservice: wfcatalog 48 | scenario: 9streams 49 | net: CX 50 | sta: PB06,PB07,PB08 51 | loc: "*" 52 | cha: HH? 53 | start: "2015-04-04T00:01:00" 54 | end: "2015-04-04T00:02:00" 55 | - webservice: wfcatalog 56 | scenario: 54streams 57 | net: CX,CZ,DK 58 | sta: "*" 59 | loc: "*" 60 | cha: HHE 61 | start: "2012-04-05T00:01:00" 62 | end: "2012-04-05T00:02:00" 63 | - webservice: wfcatalog 64 | scenario: 320streams 65 | net: GE 66 | sta: "*" 67 | loc: "*" 68 | cha: HH*,SH* 69 | start: "2011-06-03T00:01:00" 70 | end: "2011-06-03T00:02:00" 71 | - webservice: wfcatalog 72 | scenario: 1stream15days 73 | net: CZ 74 | sta: VRAC 75 | loc: -- 76 | cha: HH? 77 | start: "2012-06-08T00:00:00" 78 | end: "2012-06-23T00:00:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: CX 82 | sta: PB06,PB07,PB08 83 | loc: "*" 84 | cha: HH? 85 | start: "2015-04-04T00:01:00" 86 | end: "2015-04-04T23:59:00" 87 | - webservice: availability 88 | scenario: 54streams 89 | net: CX,CZ,DK 90 | sta: "*" 91 | loc: "*" 92 | cha: HHE 93 | start: "2012-04-05T00:01:00" 94 | end: "2012-04-05T23:59:00" 95 | - webservice: availability 96 | scenario: 320streams 97 | net: GE 98 | sta: "*" 99 | loc: "*" 100 | cha: HH*,SH* 101 | start: "2011-06-03T00:01:00" 102 | end: "2011-06-03T23:59:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: GE 106 | sta: IBBN 107 | loc: "*" 108 | cha: HH? 109 | start: "2022-06-08T00:00:00" 110 | end: "2022-06-23T00:00:00" 111 | - webservice: station 112 | scenario: 9streams 113 | net: CX 114 | sta: PB06,PB07,PB08 115 | loc: "*" 116 | cha: HH? 
117 | start: "2015-04-04T23:01:00" 118 | end: "2015-04-04T23:02:00" 119 | - webservice: station 120 | scenario: 54streams 121 | net: CX,CZ,DK 122 | sta: "*" 123 | loc: "*" 124 | cha: HHE 125 | start: "2012-04-05T23:01:00" 126 | end: "2012-04-05T23:02:00" 127 | - webservice: station 128 | scenario: 320streams 129 | net: GE 130 | sta: "*" 131 | loc: "*" 132 | cha: HH*,SH* 133 | start: "2011-06-03T00:01:00" 134 | end: "2011-06-03T23:59:00" 135 | - webservice: station 136 | scenario: 1stream15days 137 | net: GE 138 | sta: IBBN 139 | loc: "*" 140 | cha: HH? 141 | start: "2022-06-08T00:00:00" 142 | end: "2022-06-23T00:00:00" 143 | -------------------------------------------------------------------------------- /eida_nodes/niep.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | node: NIEP 3 | endpoint: eida-sc3.infp.ro 4 | routingFile: eidaws/routing/1/routing.xml 5 | onlineCheck: 6 | net: RO 7 | sta: LEOM 8 | loc: -- 9 | cha: HHZ 10 | start: 2023-01-01T00:00:00 11 | end: 2023-01-01T00:00:05 12 | requiremets: 13 | network: { biggest_network_code } 14 | perfCheck: 15 | - webservice: dataselect 16 | scenario: 9streams 17 | net: RO 18 | sta: PLOR1,PLOR2,PLOR3 19 | loc: "*" 20 | cha: HH? 21 | start: "2017-05-20T00:01:00" 22 | end: "2017-05-20T00:02:00" 23 | - webservice: dataselect 24 | scenario: 54streams # 62 streams 25 | net: RO 26 | sta: "*" 27 | loc: "*" 28 | cha: HHE,HHZ 29 | start: "2021-12-01T00:01:00" 30 | end: "2021-12-01T00:02:00" 31 | - webservice: dataselect 32 | scenario: 320streams 33 | net: "*" 34 | sta: "Z*,V*,T*,S*,P*,O*,M*" 35 | loc: "*" 36 | cha: "*" 37 | start: "2025-12-01T00:01:00" 38 | end: "2025-12-01T00:02:00" 39 | - webservice: dataselect 40 | scenario: 1stream15days 41 | net: RO 42 | sta: GZR 43 | loc: "*" 44 | cha: HHZ 45 | start: "2012-06-04T00:00:00" 46 | end: "2012-06-19T00:00:00" 47 | - webservice: station 48 | scenario: 9streams 49 | net: RO 50 | sta: PLOR1,PLOR2,PLOR3 51 | loc: "*" 52 | cha: HH? 53 | start: "2017-05-20T00:01:00" 54 | end: "2017-05-20T00:02:00" 55 | - webservice: station 56 | scenario: 54streams # 62 streams 57 | net: RO 58 | sta: "*" 59 | loc: "*" 60 | cha: HHE,HHZ 61 | start: "2021-12-01T00:01:00" 62 | end: "2021-12-01T00:02:00" 63 | - webservice: station 64 | scenario: 320streams 65 | net: "*" 66 | sta: "Z*,V*,T*,S*,P*,O*,M*" 67 | loc: "*" 68 | cha: "*" 69 | start: "2025-12-01T00:01:00" 70 | end: "2025-12-01T00:02:00" 71 | - webservice: station 72 | scenario: 1stream15days 73 | net: RO 74 | sta: GZR 75 | loc: "*" 76 | cha: HHZ 77 | start: "2012-06-04T00:00:00" 78 | end: "2012-06-19T00:00:00" 79 | - webservice: availability 80 | scenario: 9streams 81 | net: RO 82 | sta: PLOR1,PLOR2,PLOR3 83 | loc: "*" 84 | cha: HH? 85 | start: "2017-05-20T00:01:00" 86 | end: "2017-05-20T00:02:00" 87 | - webservice: availability 88 | scenario: 54streams # 62 streams 89 | net: RO 90 | sta: "*" 91 | loc: "*" 92 | cha: HHE,HHZ 93 | start: "2021-12-01T00:01:00" 94 | end: "2021-12-01T00:02:00" 95 | - webservice: availability 96 | scenario: 320streams 97 | net: "*" 98 | sta: "Z*,V*,T*,S*,P*,O*,M*" 99 | loc: "*" 100 | cha: "*" 101 | start: "2025-12-01T00:01:00" 102 | end: "2025-12-01T00:02:00" 103 | - webservice: availability 104 | scenario: 1stream15days 105 | net: RO 106 | sta: GZR 107 | loc: "*" 108 | cha: HHZ 109 | start: "2012-06-04T00:00:00" 110 | end: "2012-06-19T00:00:00" 111 | - webservice: wfcatalog 112 | scenario: 9streams 113 | net: RO 114 | sta: PLOR1,PLOR2,PLOR3 115 | loc: "*" 116 | cha: HH? 
117 | start: "2017-05-20T00:01:00" 118 | end: "2017-05-20T00:02:00" 119 | - webservice: wfcatalog 120 | scenario: 54streams # 62 streams 121 | net: RO 122 | sta: "*" 123 | loc: "*" 124 | cha: HHE,HHZ 125 | start: "2021-12-01T00:01:00" 126 | end: "2021-12-01T00:02:00" 127 | - webservice: wfcatalog 128 | scenario: 320streams 129 | net: "*" 130 | sta: "Z*,V*,T*,S*,P*,O*,M*" 131 | loc: "*" 132 | cha: "*" 133 | start: "2025-12-01T00:01:00" 134 | end: "2025-12-01T00:02:00" 135 | - webservice: wfcatalog 136 | scenario: 1stream15days 137 | net: RO 138 | sta: GZR 139 | loc: "*" 140 | cha: HHZ 141 | start: "2012-06-04T00:00:00" 142 | end: "2012-06-19T00:00:00" 143 | -------------------------------------------------------------------------------- /.github/workflows/docker-publish.yml: -------------------------------------------------------------------------------- 1 | name: Docker 2 | 3 | # This workflow uses actions that are not certified by GitHub. 4 | # They are provided by a third-party and are governed by 5 | # separate terms of service, privacy policy, and support 6 | # documentation. 7 | 8 | on: 9 | push: 10 | branches: ["main"] 11 | # Publish semver tags as releases. 12 | paths: ["perf_checks/*"] 13 | pull_request: 14 | branches: ["main"] 15 | 16 | env: 17 | # Use docker.io for Docker Hub if empty 18 | REGISTRY: ghcr.io 19 | # github.repository as / 20 | IMAGE_NAME: eida/oculus-perfchecks 21 | 22 | jobs: 23 | perfcheck_image_build_and_publish: 24 | runs-on: ubuntu-latest 25 | permissions: 26 | contents: read 27 | packages: write 28 | # This is used to complete the identity challenge 29 | # with sigstore/fulcio when running outside of PRs. 30 | id-token: write 31 | 32 | steps: 33 | - name: Checkout repository 34 | uses: actions/checkout@v4 35 | 36 | # Install the cosign tool except on PR 37 | # https://github.com/sigstore/cosign-installer 38 | - name: Install cosign 39 | if: github.event_name != 'pull_request' 40 | uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 #v3.5.0 41 | with: 42 | cosign-release: "v2.2.4" 43 | 44 | # Set up BuildKit Docker container builder to be able to build 45 | # multi-platform images and export cache 46 | # https://github.com/docker/setup-buildx-action 47 | - name: Set up Docker Buildx 48 | uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 49 | 50 | # Login against a Docker registry except on PR 51 | # https://github.com/docker/login-action 52 | - name: Log into registry ${{ env.REGISTRY }} 53 | if: github.event_name != 'pull_request' 54 | uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 55 | with: 56 | registry: ${{ env.REGISTRY }} 57 | username: ${{ github.actor }} 58 | password: ${{ secrets.GITHUB_TOKEN }} 59 | 60 | # Extract metadata (tags, labels) for Docker 61 | # https://github.com/docker/metadata-action 62 | - name: Extract Docker metadata 63 | id: meta 64 | uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 65 | with: 66 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 67 | 68 | # Build and push Docker image with Buildx (don't push on PR) 69 | # https://github.com/docker/build-push-action 70 | - name: Build and push Docker image 71 | id: build-and-push 72 | uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 73 | with: 74 | context: perf_checks 75 | push: ${{ github.event_name != 'pull_request' }} 76 | tags: ${{ steps.meta.outputs.tags }} 77 | labels: ${{ steps.meta.outputs.labels }} 78 | cache-from: type=gha 79 | 
cache-to: type=gha,mode=max 80 | 81 | # Sign the resulting Docker image digest except on PRs. 82 | # This will only write to the public Rekor transparency log when the Docker 83 | # repository is public to avoid leaking data. If you would like to publish 84 | # transparency data even for private images, pass --force to cosign below. 85 | # https://github.com/sigstore/cosign 86 | - name: Sign the published Docker image 87 | if: ${{ github.event_name != 'pull_request' }} 88 | env: 89 | # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable 90 | TAGS: ${{ steps.meta.outputs.tags }} 91 | DIGEST: ${{ steps.build-and-push.outputs.digest }} 92 | # This step uses the identity token to provision an ephemeral certificate 93 | # against the sigstore community Fulcio instance. 94 | run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST} 95 | -------------------------------------------------------------------------------- /eida_consistency/automate_eida_consistency.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import logging 4 | import json 5 | from pathlib import Path 6 | from zabbix_utils import Sender, ItemValue 7 | 8 | # config logging 9 | logging.basicConfig( 10 | level=logging.INFO, 11 | format='%(asctime)s - %(levelname)s - %(message)s', 12 | handlers=[ 13 | logging.FileHandler('automate_eida_consistency.log'), 14 | logging.StreamHandler() 15 | ] 16 | ) 17 | logger = logging.getLogger(__name__) 18 | 19 | def log_and_print(message, level=logging.INFO): 20 | """log a message and also print it to stdout""" 21 | print(message) 22 | logger.log(level, message) 23 | 24 | def run_eida_consistency(node, epochs=10, duration=600): 25 | """run eida-consistency command""" 26 | try: 27 | cmd = [ 28 | 'eida-consistency', 29 | 'consistency', 30 | '--node', node, 31 | '--epochs', str(epochs), 32 | '--duration', str(duration) 33 | ] 34 | 35 | log_and_print(f"running command {' '.join(cmd)}") 36 | 37 | result = subprocess.run( 38 | cmd, 39 | check=True, 40 | capture_output=True, 41 | text=True 42 | ) 43 | 44 | log_and_print("command completed successfully") 45 | log_and_print(f"stdout: {result.stdout}") 46 | 47 | return True 48 | 49 | except subprocess.CalledProcessError as e: 50 | log_and_print(f"error running eida-consistency: {e}", logging.ERROR) 51 | log_and_print(f"stderr: {e.stderr}", logging.ERROR) 52 | return False 53 | except Exception as e: 54 | log_and_print(f"unexpected error: {e}", logging.ERROR) 55 | return False 56 | 57 | def get_latest_json_file(reports_dir='reports'): 58 | """get the latest JSON file from the reports directory""" 59 | try: 60 | reports_path = Path(reports_dir) 61 | 62 | if not reports_path.exists(): 63 | log_and_print(f"reports directory not found: {reports_dir}", logging.ERROR) 64 | return None 65 | 66 | json_files = list(reports_path.glob('*.json')) 67 | 68 | if not json_files: 69 | log_and_print("no JSON files found in reports directory", logging.ERROR) 70 | return None 71 | 72 | # get the most recent file 73 | latest_file = max(json_files, key=lambda f: f.stat().st_mtime) 74 | 75 | log_and_print(f"latest JSON file: {latest_file}") 76 | return latest_file 77 | 78 | except Exception as e: 79 | log_and_print(f"error finding latest JSON file: {e}", logging.ERROR) 80 | return None 81 | 82 | def send_to_zabbix(hostname, json_file_path): 83 | """send the JSON report to the zabbix server""" 84 | try: 85 | # zabbix server config 86 | #ZABBIX_SERVER = 
os.getenv('ZABBIX_SERVER') 87 | ZABBIX_SERVER = 'localhost' 88 | ZABBIX_PORT = 10051 89 | 90 | if not ZABBIX_SERVER: 91 | log_and_print("ZABBIX_SERVER environement variable not set", logging.ERROR) 92 | return False 93 | 94 | # read JSON file 95 | with open(json_file_path, 'r') as f: 96 | json_content = json.load(f) 97 | 98 | # convert JSON to string for sending 99 | json_string = json.dumps(json_content) 100 | 101 | # connect to zbx srv 102 | sender = Sender(server=ZABBIX_SERVER, port=ZABBIX_PORT) 103 | 104 | log_and_print(f"sending data to zabbix fot host: {hostname}") 105 | 106 | # create item 107 | item = ItemValue(hostname, 'report.json', json_string) 108 | 109 | # send via zbx srv 110 | response = sender.send([item]) 111 | 112 | log_and_print(f"{hostname}: {response.processed}/{response.total} items sent successfully") 113 | 114 | if response.failed > 0: 115 | log_and_print(f"failed: {response.failed} items", logging.ERROR) 116 | return False 117 | else: 118 | log_and_print("all items sent successfully") 119 | return True 120 | 121 | except Exception as e: 122 | log_and_print(f"error sending to zabbix:{e}", logging.ERROR) 123 | return False 124 | 125 | def main(): 126 | node ='ICGC' 127 | epochs = 10 128 | duration = 600 129 | 130 | log_and_print(f"{'='*50}") 131 | log_and_print(f"starting EIDA consistency check for node: {node}") 132 | log_and_print(f"{'='*50}") 133 | 134 | # run eida-consistency 135 | if not run_eida_consistency(node, epochs, duration): 136 | log_and_print("eida-consistency command failed", logging.ERROR) 137 | return 138 | 139 | # get latest JSON file 140 | json_file = get_latest_json_file() 141 | 142 | if not json_file: 143 | log_and_print("no JSON file to send", logging.ERROR) 144 | return 145 | 146 | # send to zabbix 147 | hostname = node.upper() 148 | if send_to_zabbix(hostname, json_file): 149 | log_and_print(f"process completed successfully for {node}") 150 | else: 151 | log_and_print(f"failed to send report to zabbix for {node}", logging.ERROR) 152 | 153 | if __name__ == "__main__": 154 | main() -------------------------------------------------------------------------------- /scripts/yaml_to_query.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import argparse 3 | from urllib.parse import urlencode, unquote 4 | 5 | def load_eposfr_config(file_path): 6 | """load configuration from yaml file""" 7 | with open(file_path, 'r', encoding='utf-8') as file: 8 | return yaml.safe_load(file) 9 | 10 | def clean_value(value): 11 | """remove quotes from string values""" 12 | if isinstance(value, str): 13 | return value.replace('"', '').replace("'", "") 14 | return value 15 | 16 | def build_query_url(base_url, webservice, params): 17 | """build query url with parameters""" 18 | # mapping webservices to their endpoints with correct paths 19 | if webservice == 'wfcatalog': 20 | full_base_url = f"{base_url}eidaws/wfcatalog/1/query" 21 | elif webservice in ['dataselect', 'availability', 'station']: 22 | full_base_url = f"{base_url}fdsnws/{webservice}/1/query" 23 | else: 24 | # fallback for other webservices 25 | full_base_url = f"{base_url}fdsnws/{webservice}/1/query" 26 | 27 | # query parameters 28 | query_params = {} 29 | 30 | if 'net' in params: 31 | query_params['network'] = clean_value(params['net']) 32 | if 'sta' in params: 33 | query_params['station'] = clean_value(params['sta']) 34 | if 'loc' in params: 35 | query_params['location'] = clean_value(params['loc']) 36 | if 'cha' in params: 37 | 
query_params['channel'] = clean_value(params['cha']) 38 | if 'start' in params: 39 | query_params['starttime'] = clean_value(params['start']) 40 | if 'end' in params: 41 | query_params['endtime'] = clean_value(params['end']) 42 | 43 | # build complete url 44 | if query_params: 45 | url = f"{full_base_url}?{urlencode(query_params)}" 46 | # decode url to remove % encoding 47 | url = unquote(url) 48 | else: 49 | url = full_base_url 50 | 51 | return url 52 | 53 | def generate_urls_from_config(file_path): 54 | """generate all query urls from yaml file""" 55 | config = load_eposfr_config(file_path) 56 | 57 | base_url = f"https://{config['endpoint']}/" 58 | perf_checks = config.get('perfCheck', []) 59 | 60 | urls_by_scenario = {} 61 | 62 | for check in perf_checks: 63 | webservice = check.get('webservice') 64 | scenario = check.get('scenario') 65 | 66 | if not webservice or not scenario: 67 | continue 68 | 69 | # build url for this check 70 | url = build_query_url(base_url, webservice, check) 71 | 72 | # organize by scenario 73 | scenario_key = f"{webservice}_{scenario}" 74 | if scenario_key not in urls_by_scenario: 75 | urls_by_scenario[scenario_key] = [] 76 | 77 | urls_by_scenario[scenario_key].append({ 78 | 'url': url, 79 | 'webservice': webservice, 80 | 'scenario': scenario, 81 | 'params': {k: v for k, v in check.items() if k not in ['webservice', 'scenario']} 82 | }) 83 | 84 | return urls_by_scenario 85 | 86 | def print_urls(urls_by_scenario): 87 | """display urls organized by scenario""" 88 | for scenario_key, urls in urls_by_scenario.items(): 89 | print(f"\n=== {scenario_key} ===") 90 | for i, url_info in enumerate(urls, 1): 91 | print(f"{i}. {url_info['url']}") 92 | print(f" webservice: {url_info['webservice']}") 93 | print(f" scenario: {url_info['scenario']}") 94 | print(f" params: {url_info['params']}") 95 | print() 96 | 97 | def save_urls_only(urls_by_scenario, output_file): 98 | """save only urls to file""" 99 | with open(output_file, 'w', encoding='utf-8') as f: 100 | for scenario_key, urls in urls_by_scenario.items(): 101 | for url_info in urls: 102 | f.write(f"{url_info['url']}\n") 103 | 104 | if __name__ == "__main__": 105 | parser = argparse.ArgumentParser(description='generate query urls from yaml file') 106 | parser.add_argument('yaml_file', 107 | help='input yaml file (ex: eposfr.yaml)') 108 | parser.add_argument('--output', '-o', 109 | help='output file to save urls') 110 | parser.add_argument('--scenario', '-s', 111 | help='filter by specific scenario') 112 | 113 | args = parser.parse_args() 114 | 115 | try: 116 | # generate urls 117 | urls_by_scenario = generate_urls_from_config(args.yaml_file) 118 | 119 | # filter by scenario if specified 120 | if args.scenario: 121 | filtered_urls = {k: v for k, v in urls_by_scenario.items() 122 | if args.scenario.lower() in k.lower()} 123 | urls_by_scenario = filtered_urls 124 | 125 | # display urls (detailed) 126 | print_urls(urls_by_scenario) 127 | 128 | # save only urls if requested 129 | if args.output: 130 | save_urls_only(urls_by_scenario, args.output) 131 | print(f"\nurls saved to {args.output}") 132 | 133 | print(f"\ntotal: {sum(len(urls) for urls in urls_by_scenario.values())} urls generated") 134 | print(f"scenarios: {len(urls_by_scenario)}") 135 | 136 | except FileNotFoundError: 137 | print(f"error: file '{args.yaml_file}' not found") 138 | except yaml.YAMLError as e: 139 | print(f"error: invalid yaml format - {e}") 140 | except Exception as e: 141 | print(f"error: {e}") 
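142 | 
143 | # usage sketch (illustrative invocation only; the flags are the argparse options
144 | # defined above, and eposfr.yaml is the example input file named in the help text):
145 | #   python yaml_to_query.py eposfr.yaml --scenario 9streams -o urls.txt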
-------------------------------------------------------------------------------- /zabbix_server/templates/zbx_export_templates_routing.yaml: --------------------------------------------------------------------------------
1 | zabbix_export:
2 |   version: '7.0'
3 |   template_groups:
4 |     - uuid: 44950db14d8549c2a022ab3d5ba3ff96
5 |       name: Templates/EIDA
6 |   templates:
7 |     - uuid: bd7e234e9843428694e3a69342571091
8 |       template: 'Template routing'
9 |       name: 'Template routing'
10 |       groups:
11 |         - name: Templates/EIDA
12 |       items:
13 |         - uuid: 801dc7c5ae3e446f91f9babff21c90a4
14 |           name: 'Routing central global config'
15 |           type: HTTP_AGENT
16 |           key: routing.central
17 |           delay: 1d
18 |           value_type: TEXT
19 |           trends: '0'
20 |           preprocessing:
21 |             - type: JSONPATH
22 |               parameters:
23 |                 - '$.datacenters[?(@.name==''{$NODE}'')].name'
24 |               error_handler: CUSTOM_VALUE
25 |               error_handler_params: ERROR
26 |           timeout: 30s
27 |           url: 'https://www.orfeus-eu.org/eidaws/routing/1/globalconfig'
28 |           query_fields:
29 |             - name: format
30 |               value: fdsn
31 |           headers:
32 |             - name: Accept
33 |               value: application/json
34 |             - name: Content-Type
35 |               value: application/json
36 |             - name: User-Agent
37 |               value: oculus-monitor
38 |           tags:
39 |             - tag: format
40 |               value: json
41 |             - tag: routing
42 |               value: central
43 |           triggers:
44 |             - uuid: 5a0bf9b2fcbc437eb399f0683dcfb95e
45 |               expression: 'last(/Template routing/routing.central)="ERROR"'
46 |               name: '{$NODE} disappeared from EIDA central routing service'
47 |               priority: WARNING
48 |               description: |
49 |                 The node is not present in EIDA central routing here:
50 |                 https://www.orfeus-eu.org/eidaws/routing/1/globalconfig?format=fdsn
51 |               manual_close: 'YES'
52 |         - uuid: ec644c6f926642f8b2d76db892f33316
53 |           name: 'Routing central count datasets'
54 |           type: HTTP_AGENT
55 |           key: routing.central_datasets
56 |           delay: 1d
57 |           value_type: TEXT
58 |           trends: '0'
59 |           preprocessing:
60 |             - type: JSONPATH
61 |               parameters:
62 |                 - '$.datacenters[?(@.name == ''{$NODE}'')].repositories[?(@.name == ''archive'')].datasets.first().length()'
63 |               error_handler: CUSTOM_VALUE
64 |               error_handler_params: '0'
65 |           timeout: 1m
66 |           url: 'https://www.orfeus-eu.org/eidaws/routing/1/globalconfig'
67 |           query_fields:
68 |             - name: format
69 |               value: fdsn
70 |           headers:
71 |             - name: Accept
72 |               value: application/json
73 |             - name: Content-Type
74 |               value: application/json
75 |             - name: User-Agent
76 |               value: oculus-monitor
77 |           tags:
78 |             - tag: datasets
79 |             - tag: format
80 |               value: json
81 |             - tag: routing
82 |               value: central
83 |           triggers:
84 |             - uuid: 535e10a824c849a8a4e6ae79c0066730
85 |               expression: 'change(/Template routing/routing.central_datasets)<0'
86 |               recovery_mode: RECOVERY_EXPRESSION
87 |               recovery_expression: 'change(/Template routing/routing.central_datasets)>0'
88 |               name: 'EIDA central routing service lost at least one network for node {$NODE}'
89 |               priority: AVERAGE
90 |               description: |
91 |                 Central routing system advertises fewer networks than before (now: {ITEM.LASTVALUE}).
92 |                 Check the full information: https://www.orfeus-eu.org/eidaws/routing/1/globalconfig?format=fdsn
93 |               manual_close: 'YES'
94 |         - uuid: 856a7898776a49afa4355aac03465147
95 |           name: 'Routing central information'
96 |           type: HTTP_AGENT
97 |           key: routing.central_station
98 |           delay: 60m
99 |           value_type: TEXT
100 |           trends: '0'
101 |           timeout: 15s
102 |           url: 'https://www.orfeus-eu.org/eidaws/routing/1/query'
103 |           query_fields:
104 |             - name: service
105 |               value: station
106 |             - name: network
107 |               value: '{$ONLINECHECK_NET}'
108 |           headers:
109 |             - name: User-Agent
110 |               value: oculus-monitor
111 |           tags:
112 |             - tag: format
113 |               value: xml
114 |             - tag: routing
115 |               value: central
116 |           triggers:
117 |             - uuid: e2ca8a12a83a436b89c23515bddb571c
118 |               expression: 'nodata(/Template routing/routing.central_station,65m)=1'
119 |               name: 'Network code {$ONLINECHECK_NET} missing in central routing for webservice station'
120 |               priority: WARNING
121 |               description: 'test request: https://www.orfeus-eu.org/eidaws/routing/1/query?service=station&network={$ONLINECHECK_NET}'
122 |               manual_close: 'YES'
123 |         - uuid: 22f1d0f0d02c4540bc59a21554235404
124 |           name: 'Routing information published at node'
125 |           type: HTTP_AGENT
126 |           key: routing.local
127 |           delay: 15m
128 |           value_type: TEXT
129 |           trends: '0'
130 |           preprocessing:
131 |             - type: XMLPATH
132 |               parameters:
133 |                 - '//*[local-name()=''route''][@networkCode=''{$ONLINECHECK_NET}'']'
134 |           timeout: 15s
135 |           url: 'https://{$ENDPOINT}/{$ROUTINGFILE}'
136 |           headers:
137 |             - name: User-Agent
138 |               value: oculus-monitor
139 |           tags:
140 |             - tag: format
141 |               value: xml
142 |             - tag: routing
143 |               value: local
144 |           triggers:
145 |             - uuid: d7e4ce5e501f4836805cd06ae940034b
146 |               expression: 'nodata(/Template routing/routing.local,15m)=1'
147 |               name: 'Local routing information not published at node {$NODE}'
148 |               priority: WARNING
149 |               description: 'test request: https://{$ENDPOINT}/{$ROUTINGFILE}'
150 |               manual_close: 'YES'
151 | 
-------------------------------------------------------------------------------- /scripts/pc_tests_results_clean.csv: --------------------------------------------------------------------------------
1 | "testid","timestamp","requestduration","connected","completed","returncode","responselength"
2 | "statxt-1min-koeri-54streams","2025-06-23 07:22:09.317932","00:00:01.351381",True,True,200,8606
3 | "statxt-1min-sed-54streams","2025-06-23 07:22:07.966072","00:00:02.007327",True,True,200,6725
4 | "statxt-1min-resif-54streams","2025-06-23 07:22:05.958156","00:00:01.844989",True,True,204,0
5 | "staxmlresp-1min-sed-54streams","2025-06-23 07:22:04.112891","00:00:02.298042",True,True,200,1008660
6 | "staxmlresp-1min-resif-54streams","2025-06-23 07:22:01.814259","00:00:01.880137",True,True,204,0
7 | "staxmlresp-1min-odc-54streams","2025-06-23 07:21:59.93355","00:00:01.678184",True,True,204,0
8 | "staxmlresp-1min-niep-54streams","2025-06-23 07:21:58.255045","00:00:05.672007",True,True,200,3867723
9 | "avail-1day-text-resif-320streams","2025-06-23 07:21:52.582418","00:00:13.908373",True,True,200,48811
10 | "avail-1day-text-gfz-320streams","2025-06-23 07:21:38.673366","00:00:04.538125",True,True,204,0
11 | "avail-1day-text-odc-320streams","2025-06-23 07:21:34.134628","00:08:51.409414",True,True,204,0
12 | "avail-1day-text-sed-320streams","2025-06-23 07:12:42.724592","00:00:28.593827",True,True,200,33660
13 | "avail-1day-text-afad-54streams","2025-06-23 07:12:14.130208","00:00:00.583029",True,True,204,0
14 | "statxt-1min-lmu-54streams","2025-06-23 07:12:13.546731","00:00:01.454537",True,True,200,9722
15 | "staxml-1min-sed-54streams","2025-06-23 07:12:12.091634","00:00:02.153593",True,True,200,68401
16 | "staxml-1min-lmu-54streams","2025-06-23 07:12:09.937451","00:00:02.277855",True,True,200,59787
17 | "staxml-1min-uib-54streams","2025-06-23 07:12:07.658931","00:00:03.51803",True,True,200,126969
18 | "statxt-1min-iris-54streams","2025-06-23 07:12:04.140312","00:00:00.171947",True,True,204,0
19 | "avail-1day-text-ingv-54streams","2025-06-23 
07:12:03.967901","00:00:23.176666",True,True,204,0 20 | "avail-1day-text-resif-54streams","2025-06-23 07:11:40.790623","00:00:03.108836",True,True,200,6846 21 | "avail-1day-text-odc-54streams","2025-06-23 07:11:37.681181","00:01:20.085998",True,False,NULL,0 22 | "avail-1day-text-bgr-54streams","2025-06-23 07:10:17.594639","00:00:09.406265",True,True,200,7173 23 | "avail-1day-text-uib-54streams","2025-06-23 07:10:08.187753","00:00:07.590726",True,True,200,6410 24 | "avail-1day-text-niep-54streams","2025-06-23 07:10:00.596439","00:01:20.085886",True,False,NULL,0 25 | "avail-1day-text-sed-54streams","2025-06-23 07:08:40.509986","00:00:05.067904",True,True,200,5974 26 | "avail-1day-text-bgs-9streams","2025-06-23 07:08:35.441375","00:00:02.612581",True,True,204,0 27 | "avail-1day-text-afad-9streams","2025-06-23 07:08:32.828107","00:00:06.97583",True,True,204,0 28 | "avail-1day-text-resif-9streams","2025-06-23 07:08:25.8515","00:00:00.551871",True,True,200,1069 29 | "avail-1day-text-ingv-9streams","2025-06-23 07:08:25.298941","00:00:12.440301",True,True,200,23959 30 | "avail-1day-text-bgr-9streams","2025-06-23 07:08:12.858032","00:00:00.979538",True,True,200,1069 31 | "avail-1day-text-niep-9streams","2025-06-23 07:08:11.877887","00:00:40.04577",True,False,NULL,0 32 | "avail-1day-text-gfz-9streams","2025-06-23 07:07:31.831457","00:00:00.284673",True,True,204,0 33 | "avail-1day-text-odc-9streams","2025-06-23 07:07:31.546077","00:00:15.452412",True,True,204,0 34 | "avail-1day-text-iris-9streams","2025-06-23 07:07:16.093056","00:00:00.838472",True,True,200,851 35 | "avail-1day-text-sed-9streams","2025-06-23 07:07:15.253958","00:00:01.216269",True,True,200,1069 36 | "avail-1day-text-noa-9streams","2025-06-23 07:07:14.036976","00:00:29.649123",True,True,204,0 37 | "staxmlresp-1min-bgs-9streams","2025-06-23 07:06:44.387222","00:00:00.8394",True,True,200,164029 38 | "staxml-1min-bgs-9streams","2025-06-23 07:06:43.547143","00:00:00.834899",True,True,200,4950 39 | "staxmlresp-1min-afad-9streams","2025-06-23 07:06:42.711893","00:00:11.648126",True,True,200,7249663 40 | "statxt-1min-afad-9streams","2025-06-23 07:06:31.063083","00:00:11.429495",True,True,200,42488 41 | "staxmlresp-1min-sed-9streams","2025-06-23 07:06:19.633022","00:00:00.411653",True,True,200,124775 42 | "staxmlresp-1min-resif-9streams","2025-06-23 07:06:19.220811","00:00:00.370849",True,True,200,217714 43 | "staxmlresp-1min-odc-9streams","2025-06-23 07:06:18.849349","00:00:00.601843",True,True,200,31308 44 | "staxmlresp-1min-noa-9streams","2025-06-23 07:06:18.246916","00:00:07.208927",True,True,204,0 45 | "staxmlresp-1min-niep-9streams","2025-06-23 07:06:11.037443","00:00:00.610458",True,True,200,59579 46 | "staxmlresp-1min-lmu-9streams","2025-06-23 07:06:10.426423","00:00:00.395791",True,True,200,173724 47 | "staxmlresp-1min-bgr-9streams","2025-06-23 07:06:10.030106","00:00:00.422788",True,True,200,19882 48 | "statxt-1min-sed-9streams","2025-06-23 07:06:09.606793","00:00:00.38845",True,True,200,1286 49 | "statxt-1min-resif-9streams","2025-06-23 07:06:09.217805","00:00:00.364328",True,True,200,1682 50 | "statxt-1min-odc-9streams","2025-06-23 07:06:08.852928","00:00:00.399263",True,True,200,1238 51 | "statxt-1min-noa-9streams","2025-06-23 07:06:08.45304","00:00:07.228854",True,True,204,0 52 | "statxt-1min-niep-9streams","2025-06-23 07:06:01.223664","00:00:00.584241",True,True,200,1643 53 | "statxt-1min-lmu-9streams","2025-06-23 07:06:00.6389","00:00:00.39657",True,True,200,1845 54 | "statxt-1min-koeri-9streams","2025-06-23 
07:06:00.241772","00:00:01.168745",True,True,200,1517 55 | "statxt-1min-iris-9streams","2025-06-23 07:05:59.072467","00:00:01.433832",True,True,200,1400 56 | "statxt-1min-ingv-9streams","2025-06-23 07:05:57.638164","00:00:03.289351",True,True,200,6499 57 | "statxt-1min-gfz-9streams","2025-06-23 07:05:54.348448","00:00:00.292578",True,True,200,2429 58 | "statxt-1min-bgr-9streams","2025-06-23 07:05:54.055274","00:00:00.433316",True,True,200,1160 59 | "staxml-1min-sed-9streams","2025-06-23 07:05:53.621403","00:00:00.519523",True,True,200,10523 60 | "staxml-1min-resif-9streams","2025-06-23 07:05:53.101312","00:00:00.361819",True,True,200,17770 61 | "staxml-1min-noa-9streams","2025-06-23 07:05:52.738883","00:00:07.098769",True,True,204,0 62 | "1minPM-bgs-320streams","2025-06-23 07:05:45.639766","00:00:25.163987",True,True,200,1814528 63 | "1minPM-sed-320streams","2025-06-23 07:05:20.475438","00:00:15.852736",True,True,200,3985408 64 | "1minAM-resif-320streams","2025-06-23 07:05:04.622391","00:00:30.195336",True,True,200,4460544 65 | "1minPM-resif-320streams","2025-06-23 07:04:34.426745","00:00:10.609102",True,True,200,4521984 66 | "1minAM-gfz-320streams","2025-06-23 07:04:23.817343","00:01:31.536837",True,True,200,1668096 67 | "1minPM-gfz-320streams","2025-06-23 07:02:52.280152","00:01:17.516083",True,True,200,1472512 68 | "1minPM-odc-320streams","2025-06-23 07:01:34.76367","00:00:36.35732",True,True,200,1872384 69 | "1minPM-niep-320streams","2025-06-23 07:00:58.405958","00:01:15.456136",True,True,200,3085312 70 | "1minPM-noa-320streams","2025-06-23 06:59:42.949085","00:10:08.800376",True,True,200,760320 71 | "1minPM-ingv-320streams","2025-06-23 06:49:34.14823","00:04:55.412751",True,True,200,2452480 72 | "1minPM-iris-320streams","2025-06-23 06:44:38.735041","00:10:40.526349",True,True,200,24906752 73 | "1minAM-odc-320streams","2025-06-23 06:33:58.208345","00:00:33.092858",True,True,200,1911296 74 | "1minAM-sed-320streams","2025-06-23 06:33:25.115175","00:00:16.970561",True,True,200,3990016 75 | "1minAM-afad-54streams","2025-06-23 06:33:08.144026","00:00:00.609169",True,True,204,0 76 | "1minAM-ingv-54streams","2025-06-23 06:33:07.534228","00:00:05.037788",True,True,200,193536 77 | "1minAM-resif-54streams","2025-06-23 06:33:02.496076","00:00:07.042336",True,True,200,782336 78 | "1minAM-odc-54streams","2025-06-23 06:32:55.453118","00:00:05.37399",True,True,200,26112 79 | "1minPM-bgr-54streams","2025-06-23 06:32:50.078478","00:00:25.725376",True,True,200,301568 80 | "1minPM-lmu-54streams","2025-06-23 06:32:24.352763","00:00:19.552946",True,True,200,1079296 81 | "1minAM-bgr-54streams","2025-06-23 06:32:04.799032","00:00:23.949436",True,True,200,293888 82 | "1minAM-uib-54streams","2025-06-23 06:31:40.84902","00:00:13.696092",True,True,200,505856 83 | "1minPM-sed-54streams","2025-06-23 06:31:27.152502","00:00:03.004841",True,True,200,476672 84 | "1minAM-niep-54streams","2025-06-23 06:31:24.146937","00:00:08.577202",True,True,200,340480 85 | "1minPM-ingv-54streams","2025-06-23 06:31:15.56907","00:00:15.758775",True,True,200,175104 86 | "1minPM-noa-54streams","2025-06-23 06:30:59.809597","00:04:30.709951",True,True,200,619520 87 | "1minAM-sed-54streams","2025-06-23 06:26:29.09904","00:00:03.438178",True,True,200,475648 88 | "1minPM-resif-54streams","2025-06-23 06:26:25.660511","00:00:02.62906",True,True,200,790528 89 | "1minPM-bgs-9streams","2025-06-23 06:26:23.03087","00:00:01.128904",True,True,200,40960 90 | "1minAM-bgs-9streams","2025-06-23 06:26:21.90138","00:00:01.44872",True,True,200,40960 91 | 
"1minPM-afad-9streams","2025-06-23 06:26:20.452367","00:00:00.007835",True,True,400,362 92 | "1minAM-afad-9streams","2025-06-23 06:26:20.444198","00:00:00.009001",True,True,400,362 93 | "1minAM-resif-9streams","2025-06-23 06:26:20.434605","00:00:00.839296",True,True,200,122880 94 | "1minAM-ingv-9streams","2025-06-23 06:26:19.594819","00:00:13.396243",True,True,200,289792 95 | "1minPM-noa-9streams","2025-06-23 06:26:06.197934","00:00:06.987273",True,True,204,0 96 | "1minAM-bgr-9streams","2025-06-23 06:25:59.210035","00:00:01.193896",True,True,200,50688 97 | "1minPM-sed-9streams","2025-06-23 06:25:58.015429","00:00:00.610182",True,True,200,67584 98 | "1minPM-resif-9streams","2025-06-23 06:25:57.40463","00:00:01.405016",True,True,200,135168 99 | "1minPM-iris-9streams","2025-06-23 06:25:55.998933","00:00:01.808542",True,True,200,139264 100 | "1minPM-gfz-9streams","2025-06-23 06:25:54.18996","00:00:00.725377",True,True,200,98304 101 | "1minPM-odc-9streams","2025-06-23 06:25:53.463938","00:00:02.07006",True,True,200,81408 102 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Oculus Monitoring 2 | For the EIDA Technical Committee and EIDA Management Board that need to improve there services quality, Oculus is a central monitoring and alerting system that tests all the services at EIDA nodes. Unlike the previous situation where the monitoring was very scattered and uneven, OCULUS will provide a global view of the services status and indicators for keeping track of service quality evolution. 3 | 4 | ## Table of contents 5 | - [Oculus Monitoring](#oculus-monitoring) 6 | - [Table of contents](#table-of-contents) 7 | - [How to monitor a new thing](#how-to-monitor-a-new-thing) 8 | - [Deploying Oculus Zabbix and Grafana on Kubernetes using Helm](#deploying-oculus-zabbix-and-grafana-on-kubernetes-using-helm) 9 | - [Prerequisites](#prerequisites) 10 | - [Accessing the Zabbix Application (for development)](#accessing-the-zabbix-application-for-development) 11 | - [Zabbix configuration](#zabbix-configuration) 12 | - [Deploy Zabbix configuration with Ansible](#deploy-zabbix-configuration-with-ansible) 13 | - [Zabbix Ansible deployment descriptions](#zabbix-ansible-deployment-descriptions) 14 | - [Create Ansible user](#create-ansible-user) 15 | - [Zabbix configuration deployment](#zabbix-configuration-deployment) 16 | - [Deploying Oculus Grafana](#deploying-oculus-grafana) 17 | - [Prerequisites](#prerequisites-1) 18 | - [Accessing the Grafana Application (for development)](#accessing-the-grafana-application-for-development) 19 | - [Add Zabbix datasources](#add-zabbix-datasources) 20 | - [Accessing the Zabbix Application](#accessing-the-zabbix-application) 21 | - [Create Zabbix API tokens](#create-zabbix-api-tokens) 22 | - [Deploy Grafana configuration with Ansible](#deploy-grafana-configuration-with-ansible) 23 | - [Grafana Ansible deployment descriptions](#grafana-ansible-deployment-descriptions) 24 | - [Create Ansible user and service accounts](#create-ansible-user-and-service-accounts) 25 | - [Configure Ansible Grafana auth](#configure-ansible-grafana-auth) 26 | - [Launch Ansible](#launch-ansible) 27 | # How to monitor a new thing 28 | So you woud like to monitor something related to EIDA federation ? 29 | 30 | Please create a new issue using the template "New Monitoring". 
31 | 
32 | To edit node values, follow the [procedures](contribute_to_change_values.md)
33 | 
34 | # Deploying Oculus Zabbix and Grafana on Kubernetes using Helm
35 | ## Prerequisites
36 | - Kubernetes cluster (version 1.20 or later) configured and running
37 | - ```kubectl``` installed and configured
38 | - ```git``` installed and configured
39 | - Helm CLI (version 3 or later) installed https://helm.sh/docs/intro/install
40 | - Helm secrets plugin https://github.com/jkroepke/helm-secrets
41 | - Sops core https://github.com/getsops/sops
42 | - Sufficient resources in the cluster to run Zabbix components
43 | 
44 | ## Installation steps Zabbix
45 | ### 1. Clone this repository
46 | ```sh
47 | git clone https://github.com/EIDA/oculus-monitoring-backend
48 | ```
49 | 
50 | ### 2. Go to .yaml location
51 | ```sh
52 | cd zabbix_server/helm_values
53 | ```
54 | 
55 | ### 3. Add the Helm repository
56 | ```sh
57 | helm repo add zabbix-community https://zabbix-community.github.io/helm-zabbix
58 | helm repo update
59 | ```
60 | 
61 | ### 4. Create a Namespace for Zabbix
62 | ```sh
63 | kubectl create namespace eida-monitoring
64 | ```
65 | 
66 | ### 5. Create the PostgreSQL database
67 | ```sql
68 | CREATE USER oculus WITH PASSWORD '{password}';
69 | CREATE DATABASE oculus_zabbix OWNER oculus;
70 | ```
71 | 
72 | ### 6. Connect to the database
73 | We recommend using ```pgcli```
74 | 
75 | Usage:
76 | ```
77 | pgcli postgres://{user}@{netloc}/{dbname}
78 | ```
79 | Example:
80 | ```
81 | pgcli postgres://oculus@bdd-resif.fr/oculus_zabbix
82 | ```
83 | 
84 | ### 7. Decrypt password
85 | ```sh
86 | cd oculus-monitoring-backend/zabbix_server/helm_values
87 | sops -d -i values.yaml
88 | ```
89 | /!\ TODO
90 | 
91 | ### 8. Install Zabbix
92 | Apply the Helm chart
93 | ```sh
94 | helm secrets upgrade --install oculus-zabbix zabbix-community/zabbix \
95 |   --dependency-update \
96 |   -f values.yaml -n eida-monitoring --debug
97 | ```
98 | 
99 | ## Accessing the Zabbix Application (for development)
100 | - Port forward
101 | ```sh
102 | kubectl port-forward service/oculus-zabbix-zabbix-web 8888:80 -n eida-monitoring
103 | ```
104 | - [localhost:8888](http://localhost:8888)
105 | 
106 | - Default credentials:
107 |   - Username: Admin
108 |   - Password: zabbix
109 | 
110 | # Zabbix configuration
111 | 
112 | ## Deploy Zabbix configuration with Ansible
113 | To deploy the playbooks with Ansible, you need to install [Ansible](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html)
114 | 
115 | ### Zabbix Ansible deployment descriptions
116 | The Zabbix Ansible script will deploy these playbooks:
117 | - Import templates
118 | - Configure autoregistration
119 | - Deploy agents
120 | - Activate media type
121 | - Create EIDA users
122 | - Configure trigger actions
123 | 
124 | ### Create Ansible user
125 | Go to "Users > Users"
126 | - Click "Create user"
127 | - Username: ansible
128 | - Groups: "No access to the frontend" and "Zabbix administrator"
129 | - Password: {ansible_password}
130 | - Click "Permissions"
131 | - Role: "Super admin role"
132 | - Click "Add"
133 | 
134 | ### Create Ansible API token
135 | Go to "Users > API tokens"
136 | - Click "Create API token"
137 | - Name: ansible
138 | - User: ansible
139 | - Uncheck "Set expiration date and time"
140 | - Check "Enabled"
141 | - Click "Add"
142 | - Copy the "Auth token" value to the clipboard and paste it into "config_prod" or "config_staging" in ```ansible/config/```
143 | 
144 | ### Zabbix configuration deployment
145 | 
146 | #### 1. Go to .yaml location
147 | ```sh
148 | cd ansible/playbooks
149 | ```
150 | #### 2. Run playbook Ansible
151 | ```sh
152 | ansible-playbook zbx_deployment.yaml
153 | ```
154 | 
155 | # Deploying Oculus Grafana
156 | ## Prerequisites
157 | - Kubernetes cluster (version 1.20 or later) configured and running
158 | - ```kubectl``` installed and configured
159 | - ```git``` installed and configured
160 | - Helm CLI (version 3 or later) installed https://helm.sh/docs/intro/install
161 | - Helm secrets plugin https://github.com/jkroepke/helm-secrets
162 | - Sops core https://github.com/getsops/sops
163 | - Sufficient resources in the cluster to run Grafana components
164 | 
165 | ## Installation steps Grafana
166 | ### 1. Clone this repository
167 | ```sh
168 | git clone https://github.com/EIDA/oculus-monitoring-backend
169 | ```
170 | 
171 | ### 2. Go to .yaml location
172 | ```sh
173 | cd grafana_server/helm_values
174 | ```
175 | 
176 | ### 3. Add the Helm repository
177 | ```sh
178 | helm repo add grafana https://grafana.github.io/helm-charts
179 | helm repo update
180 | ```
181 | 
182 | ### 4. Decrypt password
183 | ```sh
184 | cd oculus-monitoring-backend/grafana_server/helm_values
185 | sops decrypt values.yaml
186 | ```
187 | 
188 | ### 5. Install Grafana
189 | ```sh
190 | helm upgrade --install oculus-grafana grafana/grafana \
191 |   -f values.yaml -n eida-monitoring
192 | ```
193 | 
194 | ## Accessing the Grafana Application (for development)
195 | - Port forward Grafana
196 | ```sh
197 | kubectl port-forward service/oculus-grafana 3000:3000 -n eida-monitoring
198 | ```
199 | - [localhost:3000](http://localhost:3000)
200 | - Default credentials:
201 |   - Username: admin
202 |   - Password: {admin_passwd}
203 | 
204 | ## Add Zabbix datasources
205 | ### Accessing the Zabbix Application
206 | - Port forward Zabbix
207 | ```sh
208 | kubectl port-forward service/oculus-zabbix-zabbix-web 8888:8888 -n eida-monitoring
209 | ```
210 | - localhost:8888
211 | 
212 | ### Create Zabbix API tokens
213 | In the Zabbix application, go to "Users > API tokens"
214 | - Click "Create API token"
215 | - Name: grafana
216 | - User: grafana
217 | - Set expiration date and time: uncheck
218 | - Enabled: check
219 | - Click "Add"
220 | - Copy the {auth_token}
221 | 
222 | ## Deploy Grafana configuration with Ansible
223 | To deploy the playbooks with Ansible, you need to install [Ansible](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html)
224 | 
225 | ### Grafana Ansible deployment descriptions
226 | The Grafana Ansible script will deploy these playbooks:
227 | - Add datasources
228 | - Import dashboards
229 | 
230 | ### Create Ansible user and service accounts
231 | In the Grafana application, go to "Administration > User and access > Users"
232 | - Click "New user"
233 | - Name: ansible
234 | - Username: ansible
235 | - Password: {ansible_password}
236 | - Click "Create user"
237 | - In the "Permissions" section, click "Change", select "Yes", then click "Change" again
238 | - In the "Organization" section, click "Change role", select "Admin", and click "Save"
239 | Go to "Administration > User and access > Service accounts"
240 | - Click "Add service account"
241 | - Name: ansible
242 | - Role: Admin
243 | - Click "Add service account token"
244 | - Click "No expiration"
245 | - Click "Generate token"
246 | - Copy the token to the clipboard and paste it into "config_prod" or "config_staging" in ```ansible/config/```
247 | - Click "Close"
248 | - Click the red cross in the "User" section
249 | - Click "Add permission"
- Select "User" 251 | - Select "ansible" 252 | - Select "Admin" 253 | - Click "Save" 254 | 255 | ### Configure Ansible Grafana auth 256 | #### 1. Go to config auth location 257 | ```sh 258 | cd ansible/config 259 | ``` 260 | 261 | #### 2. Decrypt file 262 | ```sh 263 | sops -d -i config_prod.yaml 264 | OR 265 | sops -d -i config_staging.yaml 266 | ``` 267 | 268 | ### Launch Ansible 269 | #### 1. Go to .yaml location 270 | ```sh 271 | cd ansible/playbooks 272 | ``` 273 | 274 | #### 2. Run playbook Ansible 275 | ```sh 276 | ansible-playbook grafana_deployment.yaml 277 | ``` 278 | -------------------------------------------------------------------------------- /perf_checks/webscenarios_perfcheck.py: -------------------------------------------------------------------------------- 1 | # /// script 2 | # requires-python = ">=3.13" 3 | # dependencies = [ 4 | # "pyyaml", 5 | # "requests", 6 | # "zabbix-utils", 7 | # ] 8 | # /// 9 | import os 10 | import yaml 11 | import time 12 | import requests 13 | import subprocess 14 | import shutil 15 | import logging 16 | from pathlib import Path 17 | from urllib.parse import urlencode 18 | from zabbix_utils import Sender, ItemValue 19 | 20 | # config logging 21 | logging.basicConfig( 22 | level=logging.INFO, 23 | format='%(asctime)s - %(levelname)s - %(message)s', 24 | handlers=[ 25 | logging.FileHandler('webscenarios_perfcheck.log'), 26 | logging.StreamHandler() 27 | ] 28 | ) 29 | logger = logging.getLogger(__name__) 30 | 31 | def log_and_print(message, level=logging.INFO): 32 | """utilities fot log and print""" 33 | print(message) 34 | logger.log(level, message) 35 | 36 | def clone_repository(): 37 | """clone the oculus-monitoring-backend repo """ 38 | repo_url = "https://github.com/EIDA/oculus-monitoring-backend" 39 | clone_dir = "oculus-monitoring-backend" 40 | 41 | try: 42 | # remove existing repo if exists 43 | if os.path.exists(clone_dir): 44 | shutil.rmtree(clone_dir) 45 | 46 | log_and_print(f"cloning repository from {repo_url}") 47 | subprocess.run(['git', 'clone', repo_url, clone_dir], check=True, capture_output=True) 48 | 49 | nodes_dir = os.path.join(clone_dir, "eida_nodes") 50 | 51 | if not os.path.exists(nodes_dir): 52 | log_and_print(f"eida_nodes directory not found, in cloned repository") 53 | return None 54 | 55 | log_and_print(f"repository cloned successfully to {clone_dir}") 56 | return nodes_dir 57 | 58 | except subprocess.CalledProcessError as e: 59 | log_and_print(f"error cloning repository: {e}") 60 | return None 61 | except Exception as e: 62 | log_and_print(f"unexpected error during git clone: {e}") 63 | return None 64 | 65 | 66 | def load_yaml_files(nodes_dir): 67 | """load all EIDA nodes .yaml""" 68 | yaml_files = {} 69 | nodes_path = Path(nodes_dir) 70 | 71 | for yaml_file in nodes_path.glob("*.yaml"): 72 | with open(yaml_file, 'r') as f: 73 | data = yaml.safe_load(f) 74 | yaml_files[yaml_file.stem] = data 75 | 76 | return yaml_files 77 | 78 | def build_url(endpoint, webservice, params): 79 | """build the URL for the requests""" 80 | 81 | # for ws wfcatalog is /eidaws/, for other is /fdsnws/ 82 | if webservice == 'wfcatalog': 83 | service_path = 'eidaws' 84 | else: 85 | service_path = 'fdsnws' 86 | 87 | 88 | base_url = f"https://{endpoint}/{service_path}/{webservice}/1/query" 89 | 90 | # query parameters 91 | query_params = {} 92 | if 'net' in params and params['net']: 93 | query_params['net'] = params['net'] 94 | if 'sta' in params and params['sta']: 95 | query_params['sta'] = params['sta'] 96 | if 'loc' in params and 
params['loc']: 97 | query_params['loc'] = params['loc'] 98 | if 'cha' in params and params['cha']: 99 | query_params['cha'] = params['cha'] 100 | if 'start' in params and params['start']: 101 | query_params['start'] = params['start'] 102 | if 'end' in params and params['end']: 103 | query_params['end'] = params['end'] 104 | 105 | return f"{base_url}?{urlencode(query_params)}" 106 | 107 | def make_request(url): 108 | """make http request""" 109 | temp_file_path = None 110 | try: 111 | start_time = time.time() 112 | 113 | headers = { 114 | 'User-Agent': 'oculus-monitor' 115 | } 116 | 117 | # download full content with get 118 | response = requests.get(url, headers=headers, timeout=180, allow_redirects=True) 119 | 120 | end_time = time.time() 121 | response_time = round((end_time - start_time) * 1000, 2) 122 | 123 | # get size file 124 | content_size = len(response.content) 125 | 126 | return { 127 | 'status_code': response.status_code, 128 | 'response_time_ms': response_time, 129 | 'content_size_bytes': content_size, 130 | 'url': url 131 | } 132 | 133 | # exceltions timeout 134 | except requests.exceptions.Timeout: 135 | return { 136 | 'status_code': 'TIMEOUT', 137 | 'response_time_ms': 180000, 138 | 'content_size_bytes': 0, 139 | 'url': url 140 | } 141 | # exception connection error 142 | except requests.exceptions.ConnectionError: 143 | return { 144 | 'status_code': 'CONNECTION_ERROR', 145 | 'response_time_ms': 0, 146 | 'content_size_bytes': 0, 147 | 'url': url 148 | } 149 | # eception request exception 150 | except requests.exceptions.RequestException as e: 151 | return { 152 | 'status_code': 'REQUEST_ERROR', 153 | 'response_time_ms': 0, 154 | 'content_size_bytes': 0, 155 | 'error': str(e), 156 | 'url': url 157 | } 158 | except Exception as e: 159 | return { 160 | 'status_code': 'ERROR', 161 | 'response_time_ms': 0, 162 | 'content_size_bytes': 0, 163 | 'error': str(e), 164 | 'url': url 165 | } 166 | 167 | def send_to_zabbix(hostname, results): 168 | """send results to zbx srv""" 169 | try: 170 | # zbx srv config 171 | ZABBIX_SERVER = os.getenv('ZABBIX_SERVER') 172 | ZABBIX_PORT = 10051 173 | 174 | # connecy to zbx srv 175 | sender = Sender(server=ZABBIX_SERVER, port=ZABBIX_PORT) 176 | 177 | log_and_print(f"\nsending data to zabbix for host: {hostname}") 178 | 179 | items = [] 180 | 181 | for key, metrics in results.items(): 182 | # calculate transfer rate (bytes/s) 183 | transfer_rate = 0 184 | if metrics['response_time_ms'] > 0 and metrics['content_size_bytes'] > 0: 185 | # convert to seconds and calc bytes/s 186 | transfer_rate = round(metrics['content_size_bytes'] / (metrics['response_time_ms'] / 1000), 2) 187 | 188 | # key format: dataselect.9streams 189 | items.extend([ 190 | ItemValue(hostname, f"{key}.status_code", str(metrics['status_code'])), 191 | ItemValue(hostname, f"{key}.response_time_ms", metrics['response_time_ms']), 192 | ItemValue(hostname, f"{key}.content_size_bytes", metrics['content_size_bytes']), 193 | ItemValue(hostname, f"{key}.transfer_rate", transfer_rate) 194 | ]) 195 | 196 | log_and_print(f" {key}.status_code = {metrics['status_code']}") 197 | log_and_print(f" {key}.response_time_ms = {metrics['response_time_ms']}") 198 | log_and_print(f" {key}.content_size_bytes = {metrics['content_size_bytes']}") 199 | log_and_print(f" {key}.transfer_rate = {transfer_rate} bytes/sec") 200 | 201 | # send via zbx srv 202 | log_and_print(f"\nsending {len(items)} items to zabbix") 203 | response = sender.send(items) 204 | log_and_print(f"{hostname}: 
{response.processed}/{response.total} items send successfully") 205 | 206 | if response.failed > 0: 207 | log_and_print(f" failed: {response.failed} items") 208 | return False 209 | else: 210 | log_and_print(f" all items sent successfully") 211 | return True 212 | except Exception as e: 213 | log_and_print(f" error sending to zabbix: {e}") 214 | return False 215 | 216 | 217 | def process_node(node_name, node_data): 218 | """process one node and return results""" 219 | results = {} 220 | endpoint = node_data.get('endpoint') 221 | 222 | if not endpoint: 223 | log_and_print(f"no endpoint found for node {node_name}") 224 | return results 225 | 226 | perf_checks = node_data.get('perfCheck', []) 227 | 228 | for check in perf_checks: 229 | webservice = check.get('webservice') 230 | scenario = check.get('scenario') 231 | 232 | if not webservice or not scenario: 233 | continue 234 | 235 | url = build_url(endpoint, webservice, check) 236 | 237 | log_and_print(f"testing {node_name}: {webservice}.{scenario}") 238 | result = make_request(url) 239 | 240 | # store result 241 | key = f"{webservice}.{scenario}" 242 | results[key] = { 243 | 'status_code': result['status_code'], 244 | 'response_time_ms': result['response_time_ms'], 245 | 'content_size_bytes': result['content_size_bytes'] 246 | } 247 | 248 | log_and_print(f" -> status: {result['status_code']}, time: {result['response_time_ms']}ms, size: {result['content_size_bytes']} bytes") 249 | 250 | return results 251 | 252 | def main(): 253 | # load all .yaml files 254 | nodes_dir = clone_repository() 255 | 256 | if not nodes_dir: 257 | log_and_print("failed to clone repository or find eida_nodes directory") 258 | return 259 | 260 | yaml_data = load_yaml_files(nodes_dir) 261 | 262 | if not yaml_data: 263 | log_and_print(f"no yaml files found in {nodes_dir}") 264 | return 265 | 266 | for node_name, node_data in yaml_data.items(): 267 | log_and_print(f"\n{'='*50}") 268 | log_and_print(f"processing node: {node_name}") 269 | log_and_print(f"{'='*50}") 270 | 271 | results = process_node(node_name, node_data) 272 | 273 | if results: 274 | # send to zbx 275 | hostname = node_name.upper() 276 | if send_to_zabbix(hostname, results): 277 | log_and_print(f"{node_name}: perfCheck and zabbix sending completed") 278 | else: 279 | log_and_print(f"{node_name}: perfCheck completed but zabbix sending failed") 280 | else: 281 | log_and_print(f"no results for node {node_name}") 282 | 283 | log_and_print(f"\nall nodes processing completed") 284 | 285 | if __name__ == "__main__": 286 | main() 287 | -------------------------------------------------------------------------------- /grafana_server/dashboards/EIDA_perfcheck_inter-nodes.json: -------------------------------------------------------------------------------- 1 | { 2 | "annotations": { 3 | "list": [ 4 | { 5 | "builtIn": 1, 6 | "datasource": { 7 | "type": "grafana", 8 | "uid": "-- Grafana --" 9 | }, 10 | "enable": true, 11 | "hide": true, 12 | "iconColor": "rgba(0, 211, 255, 1)", 13 | "name": "Annotations & Alerts", 14 | "type": "dashboard" 15 | } 16 | ] 17 | }, 18 | "editable": true, 19 | "fiscalYearStartMonth": 0, 20 | "graphTooltip": 0, 21 | "id": 5, 22 | "links": [], 23 | "panels": [ 24 | { 25 | "collapsed": false, 26 | "gridPos": { 27 | "h": 1, 28 | "w": 24, 29 | "x": 0, 30 | "y": 0 31 | }, 32 | "id": 3, 33 | "panels": [], 34 | "title": "Response time", 35 | "type": "row" 36 | }, 37 | { 38 | "datasource": { 39 | "type": "alexanderzobnin-zabbix-datasource", 40 | "uid": "ef0ms3r1nwoowb" 41 | }, 42 | "description": 
"", 43 | "fieldConfig": { 44 | "defaults": { 45 | "color": { 46 | "mode": "palette-classic" 47 | }, 48 | "custom": { 49 | "axisBorderShow": false, 50 | "axisCenteredZero": false, 51 | "axisColorMode": "text", 52 | "axisLabel": "", 53 | "axisPlacement": "auto", 54 | "barAlignment": 0, 55 | "barWidthFactor": 0.6, 56 | "drawStyle": "line", 57 | "fillOpacity": 0, 58 | "gradientMode": "none", 59 | "hideFrom": { 60 | "legend": false, 61 | "tooltip": false, 62 | "viz": false 63 | }, 64 | "insertNulls": false, 65 | "lineInterpolation": "linear", 66 | "lineStyle": { 67 | "fill": "solid" 68 | }, 69 | "lineWidth": 1, 70 | "pointSize": 6, 71 | "scaleDistribution": { 72 | "type": "linear" 73 | }, 74 | "showPoints": "auto", 75 | "spanNulls": false, 76 | "stacking": { 77 | "group": "A", 78 | "mode": "none" 79 | }, 80 | "thresholdsStyle": { 81 | "mode": "off" 82 | } 83 | }, 84 | "fieldMinMax": false, 85 | "mappings": [], 86 | "thresholds": { 87 | "mode": "absolute", 88 | "steps": [ 89 | { 90 | "color": "green", 91 | "value": 0 92 | } 93 | ] 94 | }, 95 | "unit": "s" 96 | }, 97 | "overrides": [] 98 | }, 99 | "gridPos": { 100 | "h": 11, 101 | "w": 24, 102 | "x": 0, 103 | "y": 1 104 | }, 105 | "id": 4, 106 | "options": { 107 | "legend": { 108 | "calcs": [ 109 | "mean" 110 | ], 111 | "displayMode": "table", 112 | "placement": "right", 113 | "showLegend": true 114 | }, 115 | "tooltip": { 116 | "hideZeros": false, 117 | "mode": "single", 118 | "sort": "none" 119 | } 120 | }, 121 | "pluginVersion": "12.1.1", 122 | "repeat": "SCENARIO", 123 | "repeatDirection": "v", 124 | "targets": [ 125 | { 126 | "application": { 127 | "filter": "" 128 | }, 129 | "countTriggersBy": "", 130 | "datasource": { 131 | "type": "alexanderzobnin-zabbix-datasource", 132 | "uid": "dest9vodnjv9cc" 133 | }, 134 | "evaltype": "0", 135 | "functions": [], 136 | "group": { 137 | "filter": "$EIDA_NODES" 138 | }, 139 | "host": { 140 | "filter": "$EIDA_NODES" 141 | }, 142 | "item": { 143 | "filter": "$SERVICE_SCENARIO_TIME" 144 | }, 145 | "itemTag": { 146 | "filter": "scenario: $SCENARIO" 147 | }, 148 | "macro": { 149 | "filter": "" 150 | }, 151 | "options": { 152 | "count": false, 153 | "disableDataAlignment": true, 154 | "showDisabledItems": false, 155 | "skipEmptyValues": false, 156 | "useTrends": "default", 157 | "useZabbixValueMapping": false 158 | }, 159 | "proxy": { 160 | "filter": "" 161 | }, 162 | "queryType": "0", 163 | "refId": "A", 164 | "resultFormat": "time_series", 165 | "schema": 12, 166 | "table": { 167 | "skipEmptyValues": false 168 | }, 169 | "tags": { 170 | "filter": "" 171 | }, 172 | "textFilter": "", 173 | "trigger": { 174 | "filter": "" 175 | } 176 | } 177 | ], 178 | "title": "$WS : $SCENARIO", 179 | "transformations": [ 180 | { 181 | "id": "renameByRegex", 182 | "options": { 183 | "regex": "(.*):.*", 184 | "renamePattern": "$1" 185 | } 186 | } 187 | ], 188 | "type": "timeseries" 189 | } 190 | ], 191 | "preload": false, 192 | "refresh": "15m", 193 | "schemaVersion": 41, 194 | "tags": [ 195 | "EIDA" 196 | ], 197 | "templating": { 198 | "list": [ 199 | { 200 | "allowCustomValue": false, 201 | "current": { 202 | "text": "All", 203 | "value": [ 204 | "$__all" 205 | ] 206 | }, 207 | "datasource": { 208 | "type": "alexanderzobnin-zabbix-datasource", 209 | "uid": "ef0ms3r1nwoowb" 210 | }, 211 | "definition": "", 212 | "description": "", 213 | "includeAll": true, 214 | "label": "EIDA NODES", 215 | "multi": true, 216 | "name": "EIDA_NODES", 217 | "options": [], 218 | "query": { 219 | "application": "PC", 220 | "group": "/^EIDA 
nodes$/", 221 | "host": "/.*/", 222 | "item": "/.*/", 223 | "itemTag": "", 224 | "queryType": "host" 225 | }, 226 | "refresh": 1, 227 | "regex": "", 228 | "sort": 1, 229 | "type": "query" 230 | }, 231 | { 232 | "allowCustomValue": false, 233 | "current": { 234 | "text": "webservice: availability", 235 | "value": "webservice: availability" 236 | }, 237 | "definition": "", 238 | "description": "", 239 | "includeAll": true, 240 | "label": "WEB SERVICES", 241 | "name": "WS", 242 | "options": [], 243 | "query": { 244 | "application": "", 245 | "group": "$EIDA_NODES", 246 | "host": "$EIDA_NODES", 247 | "item": "", 248 | "itemTag": "/.*webservice.*/", 249 | "queryType": "itemTag" 250 | }, 251 | "refresh": 1, 252 | "regex": "^(?!.*routing_present|.*routing_information).*$", 253 | "type": "query" 254 | }, 255 | { 256 | "current": { 257 | "text": "All", 258 | "value": "$__all" 259 | }, 260 | "definition": "", 261 | "hide": 2, 262 | "includeAll": true, 263 | "label": "TRANSFER RATE", 264 | "multi": true, 265 | "name": "TRANSFER_RATE", 266 | "options": [], 267 | "query": { 268 | "application": "PC", 269 | "group": "$EIDA_NODES", 270 | "host": "$EIDA_NODES", 271 | "item": "/.*Transfer Rate$/", 272 | "itemTag": "$WS", 273 | "queryType": "item" 274 | }, 275 | "refresh": 1, 276 | "regex": "", 277 | "sort": 1, 278 | "type": "query" 279 | }, 280 | { 281 | "current": { 282 | "text": "All", 283 | "value": "$__all" 284 | }, 285 | "definition": "", 286 | "hide": 2, 287 | "includeAll": true, 288 | "label": "SCENARIO TIME", 289 | "multi": true, 290 | "name": "STREAMS_TIME", 291 | "options": [], 292 | "query": { 293 | "application": "", 294 | "group": "$EIDA_NODES", 295 | "host": "$EIDA_NODES", 296 | "item": "/.*Response Time Ms$/", 297 | "itemTag": "$WS", 298 | "queryType": "item" 299 | }, 300 | "refresh": 1, 301 | "regex": "", 302 | "sort": 1, 303 | "type": "query" 304 | }, 305 | { 306 | "current": { 307 | "text": "All", 308 | "value": "$__all" 309 | }, 310 | "definition": "", 311 | "hide": 2, 312 | "includeAll": true, 313 | "label": "SCENARIO TIME", 314 | "multi": true, 315 | "name": "SERVICE_SCENARIO_TIME", 316 | "options": [], 317 | "query": { 318 | "application": "", 319 | "group": "$EIDA_NODES", 320 | "host": "$EIDA_NODES", 321 | "item": "/.*$SCENARIO - Response Time Ms$/", 322 | "itemTag": "$WS", 323 | "queryType": "item" 324 | }, 325 | "refresh": 1, 326 | "regex": "", 327 | "sort": 1, 328 | "type": "query" 329 | }, 330 | { 331 | "allowCustomValue": false, 332 | "current": { 333 | "text": "All", 334 | "value": "$__all" 335 | }, 336 | "definition": "", 337 | "description": "", 338 | "hide": 2, 339 | "includeAll": true, 340 | "name": "SCENARIO", 341 | "options": [], 342 | "query": { 343 | "application": "", 344 | "group": "/.*/", 345 | "host": "/.*/", 346 | "item": "", 347 | "itemTag": "/scenario:.*/", 348 | "queryType": "itemTag" 349 | }, 350 | "refresh": 1, 351 | "regex": "/scenario: (.*)/", 352 | "type": "query" 353 | } 354 | ] 355 | }, 356 | "time": { 357 | "from": "now-2d", 358 | "to": "now" 359 | }, 360 | "timepicker": {}, 361 | "timezone": "browser", 362 | "title": "EIDA PerfCheck inter - nodes", 363 | "uid": "1a7598eb-5c4e-41c9-a596-70f9ee92eb7f", 364 | "version": 7, 365 | "weekStart": "monday" 366 | } -------------------------------------------------------------------------------- /scripts/pc_tests_results.csv: -------------------------------------------------------------------------------- 1 | 
"testid","timestamp","requestduration","connected","completed","returncode","responselength","response_md5" 2 | "statxt-1min-koerimedsed-54streams","2025-06-23 07:22:09.317932","00:00:01.351381",True,True,200,8606,"a58d4762e0b84c75ba7296cacf343a25" 3 | "statxt-1min-sedmedsed-54streams","2025-06-23 07:22:07.966072","00:00:02.007327",True,True,200,6725,"c16518c20a07ec06c5b186e5b00c000b" 4 | "statxt-1min-resifmedsed-54streams","2025-06-23 07:22:05.958156","00:00:01.844989",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 5 | "staxmlresp-1min-sedmedsed-54streams","2025-06-23 07:22:04.112891","00:00:02.298042",True,True,200,1008660,"9ae59823a7ae2f3786acf5d7a7358363" 6 | "staxmlresp-1min-resifmedsed-54streams","2025-06-23 07:22:01.814259","00:00:01.880137",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 7 | "staxmlresp-1min-odcmedsed-54streams","2025-06-23 07:21:59.93355","00:00:01.678184",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 8 | "staxmlresp-1min-niepmedsed-54streams","2025-06-23 07:21:58.255045","00:00:05.672007",True,True,200,3867723,"21ffd8f24db1defecc2cf10210d619f5" 9 | "avail-1day-text-resifmedsed-320streams","2025-06-23 07:21:52.582418","00:00:13.908373",True,True,200,48811,"3be73bb779db6bb3b011097706d1098d" 10 | "avail-1day-text-gfzmedsed-320streams","2025-06-23 07:21:38.673366","00:00:04.538125",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 11 | "avail-1day-text-odcmedsed-320streams","2025-06-23 07:21:34.134628","00:08:51.409414",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 12 | "avail-1day-text-sedmedsed-320streams","2025-06-23 07:12:42.724592","00:00:28.593827",True,True,200,33660,"78ead1f7c6fe8809bb0465155fe21eca" 13 | "avail-1day-text-afadmedsed-54streams","2025-06-23 07:12:14.130208","00:00:00.583029",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 14 | "statxt-1min-lmumedsed-54streams","2025-06-23 07:12:13.546731","00:00:01.454537",True,True,200,9722,"04007f7a57eda59d9b2e398ed0aded1e" 15 | "staxml-1min-sedmedsed-54streams","2025-06-23 07:12:12.091634","00:00:02.153593",True,True,200,68401,"1d1d0ad93d4ddc64509e1c260706cfa5" 16 | "staxml-1min-lmumedsed-54streams","2025-06-23 07:12:09.937451","00:00:02.277855",True,True,200,59787,"4cf10eb8ea3601c0fafd1a5f6e2221a7" 17 | "staxml-1min-uibmedsed-54streams","2025-06-23 07:12:07.658931","00:00:03.51803",True,True,200,126969,"c1f81071749c07b034b0cecb1379839c" 18 | "statxt-1min-irismedsed-54streams","2025-06-23 07:12:04.140312","00:00:00.171947",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 19 | "avail-1day-text-ingvmedsed-54streams","2025-06-23 07:12:03.967901","00:00:23.176666",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 20 | "avail-1day-text-resifmedsed-54streams","2025-06-23 07:11:40.790623","00:00:03.108836",True,True,200,6846,"e6b26365a79f56516d9fe2060d68e450" 21 | "avail-1day-text-odcmedsed-54streams","2025-06-23 07:11:37.681181","00:01:20.085998",True,False,NULL,0,NULL 22 | "avail-1day-text-bgrmedsed-54streams","2025-06-23 07:10:17.594639","00:00:09.406265",True,True,200,7173,"6495e3bf6d501c46ea16916a8fddb763" 23 | "avail-1day-text-uibmedsed-54streams","2025-06-23 07:10:08.187753","00:00:07.590726",True,True,200,6410,"8f0e83bf4ef555fbb65f7aff7a9ff032" 24 | "avail-1day-text-niepmedsed-54streams","2025-06-23 07:10:00.596439","00:01:20.085886",True,False,NULL,0,NULL 25 | "avail-1day-text-sedmedsed-54streams","2025-06-23 07:08:40.509986","00:00:05.067904",True,True,200,5974,"aac17b1eda160d9dd0558b914be75a3d" 26 | "avail-1day-text-bgsmedsed-9streams","2025-06-23 
07:08:35.441375","00:00:02.612581",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 27 | "avail-1day-text-afadmedsed-9streams","2025-06-23 07:08:32.828107","00:00:06.97583",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 28 | "avail-1day-text-resifmedsed-9streams","2025-06-23 07:08:25.8515","00:00:00.551871",True,True,200,1069,"5464087762dfe73bf3aa7de715492129" 29 | "avail-1day-text-ingvmedsed-9streams","2025-06-23 07:08:25.298941","00:00:12.440301",True,True,200,23959,"98fed3f36437b8ffc9a110d260d89178" 30 | "avail-1day-text-bgrmedsed-9streams","2025-06-23 07:08:12.858032","00:00:00.979538",True,True,200,1069,"de2fb9a570d284996daa6b4864e03b59" 31 | "avail-1day-text-niepmedsed-9streams","2025-06-23 07:08:11.877887","00:00:40.04577",True,False,NULL,0,NULL 32 | "avail-1day-text-gfzmedsed-9streams","2025-06-23 07:07:31.831457","00:00:00.284673",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 33 | "avail-1day-text-odcmedsed-9streams","2025-06-23 07:07:31.546077","00:00:15.452412",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 34 | "avail-1day-text-irismedsed-9streams","2025-06-23 07:07:16.093056","00:00:00.838472",True,True,200,851,"7f67d0a3220ec52edf50d5b4ca25faf2" 35 | "avail-1day-text-sedmedsed-9streams","2025-06-23 07:07:15.253958","00:00:01.216269",True,True,200,1069,"f71126b38e31863cc4dbc19b145fa36a" 36 | "avail-1day-text-noamedsed-9streams","2025-06-23 07:07:14.036976","00:00:29.649123",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 37 | "staxmlresp-1min-bgsmedsed-9streams","2025-06-23 07:06:44.387222","00:00:00.8394",True,True,200,164029,"a7093952eabd66b4ae15b0e4fc2ec524" 38 | "staxml-1min-bgsmedsed-9streams","2025-06-23 07:06:43.547143","00:00:00.834899",True,True,200,4950,"3410b1a72b502f10d8d96b6a590fb5c1" 39 | "staxmlresp-1min-afadmedsed-9streams","2025-06-23 07:06:42.711893","00:00:11.648126",True,True,200,7249663,"01648a392fea1d8e131550d36fc55222" 40 | "statxt-1min-afadmedsed-9streams","2025-06-23 07:06:31.063083","00:00:11.429495",True,True,200,42488,"1b1f312b7493271af9b15aa6fb5a490c" 41 | "staxmlresp-1min-sedmedsed-9streams","2025-06-23 07:06:19.633022","00:00:00.411653",True,True,200,124775,"36fd3da7575823e314b3b0f20afbaaa2" 42 | "staxmlresp-1min-resifmedsed-9streams","2025-06-23 07:06:19.220811","00:00:00.370849",True,True,200,217714,"d631ebfa0b9288cfe9e66c5a76d8fdde" 43 | "staxmlresp-1min-odcmedsed-9streams","2025-06-23 07:06:18.849349","00:00:00.601843",True,True,200,31308,"abd5eef764f69b4b04e8f5d37a31d345" 44 | "staxmlresp-1min-noamedsed-9streams","2025-06-23 07:06:18.246916","00:00:07.208927",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 45 | "staxmlresp-1min-niepmedsed-9streams","2025-06-23 07:06:11.037443","00:00:00.610458",True,True,200,59579,"888153ace5b257e47d964e03d2983ecf" 46 | "staxmlresp-1min-lmumedsed-9streams","2025-06-23 07:06:10.426423","00:00:00.395791",True,True,200,173724,"6c0f8a5cd7e1079e9931187da560e169" 47 | "staxmlresp-1min-bgrmedsed-9streams","2025-06-23 07:06:10.030106","00:00:00.422788",True,True,200,19882,"b9b9e37778a7c3f3bb3be97237315247" 48 | "statxt-1min-sedmedsed-9streams","2025-06-23 07:06:09.606793","00:00:00.38845",True,True,200,1286,"742e80e2056eff7e0191b69fc62bd673" 49 | "statxt-1min-resifmedsed-9streams","2025-06-23 07:06:09.217805","00:00:00.364328",True,True,200,1682,"e16bbd6f9f4d35f4c74d2e0ef0af9800" 50 | "statxt-1min-odcmedsed-9streams","2025-06-23 07:06:08.852928","00:00:00.399263",True,True,200,1238,"5c87d57626a0b620984bc037c7ae4915" 51 | "statxt-1min-noamedsed-9streams","2025-06-23 
07:06:08.45304","00:00:07.228854",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 52 | "statxt-1min-niepmedsed-9streams","2025-06-23 07:06:01.223664","00:00:00.584241",True,True,200,1643,"89ab3823a94af8625f8106a05483560d" 53 | "statxt-1min-lmumedsed-9streams","2025-06-23 07:06:00.6389","00:00:00.39657",True,True,200,1845,"a49c38aaaa560e3df8a4368c76605d89" 54 | "statxt-1min-koerimedsed-9streams","2025-06-23 07:06:00.241772","00:00:01.168745",True,True,200,1517,"7f0f90c873a68e7cf870f04db5711dc0" 55 | "statxt-1min-irismedsed-9streams","2025-06-23 07:05:59.072467","00:00:01.433832",True,True,200,1400,"c62960929ad0b9f8fe7b0abd150941d8" 56 | "statxt-1min-ingvmedsed-9streams","2025-06-23 07:05:57.638164","00:00:03.289351",True,True,200,6499,"bb6fabb279d43e2e285936e06e78c5dc" 57 | "statxt-1min-gfzmedsed-9streams","2025-06-23 07:05:54.348448","00:00:00.292578",True,True,200,2429,"5fcf87510c5ed7b8634f476fdbc16361" 58 | "statxt-1min-bgrmedsed-9streams","2025-06-23 07:05:54.055274","00:00:00.433316",True,True,200,1160,"d812b83d69a6265f3ee493fbc2550101" 59 | "staxml-1min-sedmedsed-9streams","2025-06-23 07:05:53.621403","00:00:00.519523",True,True,200,10523,"1a7cdae0f4ad23d26d2ec4ed9dc2ef3a" 60 | "staxml-1min-resifmedsed-9streams","2025-06-23 07:05:53.101312","00:00:00.361819",True,True,200,17770,"66552bc78bd95e325c094bb3652ba1ea" 61 | "staxml-1min-noamedsed-9streams","2025-06-23 07:05:52.738883","00:00:07.098769",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 62 | "1minPM-bgsmedsed-320streams","2025-06-23 07:05:45.639766","00:00:25.163987",True,True,200,1814528,"4fed26ab6277f6a0cfe638a315516fd6" 63 | "1minPM-sedmedsed-320streams","2025-06-23 07:05:20.475438","00:00:15.852736",True,True,200,3985408,"17723883fcc5cc2e48558c011e5869fd" 64 | "1minAM-resifmedsed-320streams","2025-06-23 07:05:04.622391","00:00:30.195336",True,True,200,4460544,"ed51a0ab6e599107beaa4e5e1e005215" 65 | "1minPM-resifmedsed-320streams","2025-06-23 07:04:34.426745","00:00:10.609102",True,True,200,4521984,"2499494273b824861116a350e438dbc7" 66 | "1minAM-gfzmedsed-320streams","2025-06-23 07:04:23.817343","00:01:31.536837",True,True,200,1668096,"f454599597264d86ef6af4fe461b6825" 67 | "1minPM-gfzmedsed-320streams","2025-06-23 07:02:52.280152","00:01:17.516083",True,True,200,1472512,"4213ea5eed0ea92ced0efb4256d6d060" 68 | "1minPM-odcmedsed-320streams","2025-06-23 07:01:34.76367","00:00:36.35732",True,True,200,1872384,"7bb12de99a2b21c594785c2cd3a03b21" 69 | "1minPM-niepmedsed-320streams","2025-06-23 07:00:58.405958","00:01:15.456136",True,True,200,3085312,"9f379985e33959ed483907caf7c6b111" 70 | "1minPM-noamedsed-320streams","2025-06-23 06:59:42.949085","00:10:08.800376",True,True,200,760320,"1a5f89ce18d113ce5065bae0e96af20c" 71 | "1minPM-ingvmedsed-320streams","2025-06-23 06:49:34.14823","00:04:55.412751",True,True,200,2452480,"4c9321a7bb4a635173165c37da75ff9c" 72 | "1minPM-irismedsed-320streams","2025-06-23 06:44:38.735041","00:10:40.526349",True,True,200,24906752,"06e3abfaf8ee01a3b7027fd14bb0aa12" 73 | "1minAM-odcmedsed-320streams","2025-06-23 06:33:58.208345","00:00:33.092858",True,True,200,1911296,"dba822d36210958b10555b15fa9adb92" 74 | "1minAM-sedmedsed-320streams","2025-06-23 06:33:25.115175","00:00:16.970561",True,True,200,3990016,"03eee6107af0c539eec4044263775f84" 75 | "1minAM-afadmedsed-54streams","2025-06-23 06:33:08.144026","00:00:00.609169",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 76 | "1minAM-ingvmedsed-54streams","2025-06-23 
06:33:07.534228","00:00:05.037788",True,True,200,193536,"42537d0641b1c7de834df7107c4a2f91" 77 | "1minAM-resifmedsed-54streams","2025-06-23 06:33:02.496076","00:00:07.042336",True,True,200,782336,"2332bf8b56cd24e1bb76e7fd52439f28" 78 | "1minAM-odcmedsed-54streams","2025-06-23 06:32:55.453118","00:00:05.37399",True,True,200,26112,"5f5bf87f91c982f263596115ca7680e3" 79 | "1minPM-bgrmedsed-54streams","2025-06-23 06:32:50.078478","00:00:25.725376",True,True,200,301568,"aec8d3c78e82e8bb514d9acf8f3b43fb" 80 | "1minPM-lmumedsed-54streams","2025-06-23 06:32:24.352763","00:00:19.552946",True,True,200,1079296,"bb5cb9661c23ab6ee43e4d08000988ea" 81 | "1minAM-bgrmedsed-54streams","2025-06-23 06:32:04.799032","00:00:23.949436",True,True,200,293888,"c13f3d6c9984a14cadefeab4f7ebef00" 82 | "1minAM-uibmedsed-54streams","2025-06-23 06:31:40.84902","00:00:13.696092",True,True,200,505856,"be40102c3e427e3f9f2082c9bde0b9b5" 83 | "1minPM-sedmedsed-54streams","2025-06-23 06:31:27.152502","00:00:03.004841",True,True,200,476672,"41a35afb315dc5ad2d7bfcaef998fdd6" 84 | "1minAM-niepmedsed-54streams","2025-06-23 06:31:24.146937","00:00:08.577202",True,True,200,340480,"260145e07e89ea159973f999fc6b7fcc" 85 | "1minPM-ingvmedsed-54streams","2025-06-23 06:31:15.56907","00:00:15.758775",True,True,200,175104,"b2cf49709a2f5782180f9804122a4600" 86 | "1minPM-noamedsed-54streams","2025-06-23 06:30:59.809597","00:04:30.709951",True,True,200,619520,"d22cc8c7b4dfa6e7825e0f6a3b4627ba" 87 | "1minAM-sedmedsed-54streams","2025-06-23 06:26:29.09904","00:00:03.438178",True,True,200,475648,"5f268fae6bae9d911c03d58467402e27" 88 | "1minPM-resifmedsed-54streams","2025-06-23 06:26:25.660511","00:00:02.62906",True,True,200,790528,"8450a29f6beeebd2327d9f56aae01ff8" 89 | "1minPM-bgsmedsed-9streams","2025-06-23 06:26:23.03087","00:00:01.128904",True,True,200,40960,"a7ba4a5d7962a1b83f2f004fb418460e" 90 | "1minAM-bgsmedsed-9streams","2025-06-23 06:26:21.90138","00:00:01.44872",True,True,200,40960,"270cfbebd207e431cc14d3075bd1e503" 91 | "1minPM-afadmedsed-9streams","2025-06-23 06:26:20.452367","00:00:00.007835",True,True,400,362,"55dcc05ab4a4debe15aa0e5eb6ced3ee" 92 | "1minAM-afadmedsed-9streams","2025-06-23 06:26:20.444198","00:00:00.009001",True,True,400,362,"0706c9c23e2a97a65fbb56f01db3e1fb" 93 | "1minAM-resifmedsed-9streams","2025-06-23 06:26:20.434605","00:00:00.839296",True,True,200,122880,"7e05392c0c3f5206b98b9c4fa5cd851b" 94 | "1minAM-ingvmedsed-9streams","2025-06-23 06:26:19.594819","00:00:13.396243",True,True,200,289792,"a38d134fc947531ac0c5b3a4f6c20674" 95 | "1minPM-noamedsed-9streams","2025-06-23 06:26:06.197934","00:00:06.987273",True,True,204,0,"d41d8cd98f00b204e9800998ecf8427e" 96 | "1minAM-bgrmedsed-9streams","2025-06-23 06:25:59.210035","00:00:01.193896",True,True,200,50688,"ec9d99735cee69049975b423e54b7a1c" 97 | "1minPM-sedmedsed-9streams","2025-06-23 06:25:58.015429","00:00:00.610182",True,True,200,67584,"fc7c780f104c25afb2df830b7b3bd0a7" 98 | "1minPM-resifmedsed-9streams","2025-06-23 06:25:57.40463","00:00:01.405016",True,True,200,135168,"15c39aff9fa86473efa569ebdc3d6b57" 99 | "1minPM-irismedsed-9streams","2025-06-23 06:25:55.998933","00:00:01.808542",True,True,200,139264,"81ca1646cd3309857bd830f931021a49" 100 | "1minPM-gfzmedsed-9streams","2025-06-23 06:25:54.18996","00:00:00.725377",True,True,200,98304,"d096d221d07ac8ca0c9d13edec314466" 101 | "1minPM-odcmedsed-9streams","2025-06-23 06:25:53.463938","00:00:02.07006",True,True,200,81408,"e4285ad9d5b273e0e76e144e0bef5731" 102 | 
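The rows above carry no header in this excerpt. The field names used below are inferred from the keys that transform_perfcheck.py (next file) expects; the two boolean columns and the trailing checksum column are labelled with guessed names. A minimal sketch of the CSV-to-JSON step, under those assumptions (the repo's scripts/query/csv_to_json.py is presumably the authoritative converter):

    import csv
    import json

    # Inferred column order; "completed", "succeeded" and "md5" are hypothetical names.
    FIELDS = ["testid", "timestamp", "requestduration",
              "completed", "succeeded", "returncode", "responselength", "md5"]

    def csv_to_records(path):
        """Read raw perfcheck rows into the dicts that transform_perfcheck.py consumes."""
        with open(path, newline="", encoding="utf-8") as f:
            return [dict(zip(FIELDS, row)) for row in csv.reader(f) if row]

    # Usage:
    # with open("pc_tests_results_clean.json", "w", encoding="utf-8") as out:
    #     json.dump(csv_to_records("pc_tests_results_clean.csv"), out, indent=2)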
--------------------------------------------------------------------------------
/scripts/transform_perfcheck.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 | import requests
4 | import argparse
5 | from datetime import datetime
6 | 
7 | ZABBIX_URL = "http://localhost:8888/api_jsonrpc.php"
8 | ZABBIX_API_TOKEN = "9a195f4faab56c7ed9e0124efad34f33a6cd0291649f58c5ac9183af0e733c0b"
9 | 
10 | def get_zabbix_hosts():
11 |     """Get the list of host names from Zabbix."""
12 |     payload = {
13 |         "jsonrpc": "2.0",
14 |         "method": "host.get",
15 |         "params": {
16 |             "output": ["host"]
17 |         },
18 |         "auth": ZABBIX_API_TOKEN,
19 |         "id": 1
20 |     }
21 | 
22 |     try:
23 |         response = requests.post(ZABBIX_URL, json=payload, headers={'Content-Type': 'application/json'})
24 |         response.raise_for_status()
25 |         result = response.json()
26 | 
27 |         if 'result' in result:
28 |             # convert host names to lowercase for comparison
29 |             return [host['host'].lower() for host in result['result']]
30 |         else:
31 |             print(f"error getting hosts: {result.get('error', 'unknown error')}")
32 |             return []
33 |     except requests.exceptions.RequestException as e:
34 |         print(f"error connecting to zabbix: {e}")
35 |         return []
36 | 
37 | def send_to_zabbix(data_points, dry_run=False):
38 |     """Send history data to Zabbix using the history.push API."""
39 |     # convert timestamps to unix timestamps for zabbix
40 |     zabbix_data = []
41 | 
42 |     for item in data_points:
43 |         timestamp = item["timestamp"]
44 |         # convert to a datetime, then to unix time
45 |         dt = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S.%f")
46 |         unix_timestamp = int(dt.timestamp())
47 | 
48 |         # use the uppercase eida_node as the zabbix host
49 |         eida_node = item["eida_node"].upper()
50 | 
51 |         # create one data point per metric
52 |         for key, value in item.items():
53 |             if key not in ["eida_node", "timestamp"]:
54 |                 zabbix_data.append({
55 |                     "host": eida_node,
56 |                     "key": key,
57 |                     "value": str(value),
58 |                     "clock": unix_timestamp
59 |                 })
60 | 
61 |     if not zabbix_data:
62 |         print("no data to send to zabbix")
63 |         return True
64 | 
65 |     if dry_run:
66 |         print(f"[dry-run] would send {len(zabbix_data)} data points to zabbix")
67 |         print("[dry-run] sample data points:")
68 |         for i, point in enumerate(zabbix_data[:5]):  # show the first 5 points
69 |             print(f"  {i+1}. host: {point['host']}, key: {point['key']}, value: {point['value']}, timestamp: {point['clock']}")
70 |         if len(zabbix_data) > 5:
71 |             print(f"  ... and {len(zabbix_data) - 5} more data points")
72 |         return True
73 | 
74 |     # split into chunks of 1000 items (zabbix limit)
75 |     chunk_size = 1000
76 |     total_chunks = (len(zabbix_data) + chunk_size - 1) // chunk_size
77 | 
78 |     for i in range(0, len(zabbix_data), chunk_size):
79 |         chunk = zabbix_data[i:i + chunk_size]
80 |         chunk_num = i // chunk_size + 1
81 | 
82 |         payload = {
83 |             "jsonrpc": "2.0",
84 |             "method": "history.push",
85 |             "params": chunk,
86 |             "auth": ZABBIX_API_TOKEN,
87 |             "id": chunk_num
88 |         }
89 | 
90 |         try:
91 |             response = requests.post(ZABBIX_URL, json=payload, headers={'Content-Type': 'application/json'})
92 |             response.raise_for_status()
93 |             result = response.json()
94 | 
95 |             if 'result' in result:
96 |                 print(f"successfully sent chunk {chunk_num}/{total_chunks} ({len(chunk)} items)")
97 |             else:
98 |                 print(f"error sending chunk {chunk_num}: {result.get('error', 'unknown error')}")
99 |                 return False
100 | 
101 |         except requests.exceptions.RequestException as e:
102 |             print(f"error sending chunk {chunk_num} to zabbix: {e}")
103 |             return False
104 | 
105 |     print(f"successfully sent all {len(zabbix_data)} data points to zabbix")
106 |     return True
107 | 
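# Worked example (illustrative, not from an actual run): history.push receives
# flat item objects, so a single transformed record fans out into one data
# point per metric key. A record shaped like
#   {"eida_node": "noa", "timestamp": "2025-06-23 07:07:14.036976",
#    "availability.9streams.status_code": 204}
# becomes
#   {"host": "NOA", "key": "availability.9streams.status_code", "value": "204",
#    "clock": <unix time of 2025-06-23 07:07:14 in the local timezone>}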
108 | def send_file_to_zbx(file_path, dry_run=False):
109 |     """Send transformed data from a file to Zabbix."""
110 |     try:
111 |         with open(file_path, 'r', encoding='utf-8') as f:
112 |             data = json.load(f)
113 | 
114 |         print(f"loaded {len(data)} records from {file_path}")
115 |         return send_to_zabbix(data, dry_run)
116 | 
117 |     except FileNotFoundError:
118 |         print(f"error: file '{file_path}' not found")
119 |         return False
120 |     except json.JSONDecodeError as e:
121 |         print(f"error: invalid json in file - {e}")
122 |         return False
123 | 
124 | def parse_duration_to_ms(duration_str):
125 |     """Convert a duration string "HH:MM:SS.microseconds" to milliseconds."""
126 |     # parse the duration string format: "00:00:01.351381"
127 |     match = re.match(r'(\d+):(\d+):(\d+)\.(\d+)', duration_str)
128 |     if match:
129 |         hours, minutes, seconds, microseconds = map(int, match.groups())
130 |         total_ms = (hours * 3600 + minutes * 60 + seconds) * 1000 + microseconds // 1000
131 |         return total_ms
132 |     return 0
133 | 
134 | def parse_duration_to_seconds(duration_str):
135 |     """Convert a duration string "HH:MM:SS.microseconds" to seconds."""
136 |     # parse the duration string format: "00:00:01.351381"
137 |     match = re.match(r'(\d+):(\d+):(\d+)\.(\d+)', duration_str)
138 |     if match:
139 |         hours, minutes, seconds, microseconds = map(int, match.groups())
140 |         total_seconds = hours * 3600 + minutes * 60 + seconds + microseconds / 1000000
141 |         return total_seconds
142 |     return 0
143 | 
144 | def calculate_transfer_rate(response_length, request_duration):
145 |     """Calculate the transfer rate in bytes/s."""
146 |     try:
147 |         length = int(response_length)
148 |         duration_seconds = parse_duration_to_seconds(request_duration)
149 | 
150 |         if duration_seconds > 0:
151 |             return round(length / duration_seconds, 2)
152 |         else:
153 |             return 0
154 |     except (ValueError, ZeroDivisionError):
155 |         return 0
156 | 
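# Worked example (values from the ingv availability row in the CSV above):
#   parse_duration_to_ms("00:00:12.440301")            -> 12440      # 12 s * 1000 + 440301 // 1000
#   parse_duration_to_seconds("00:00:12.440301")       -> 12.440301
#   calculate_transfer_rate(23959, "00:00:12.440301")  -> 1925.92    # bytes/s, rounded to 2 decimals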
157 | def transform_testid_and_extract_host(testid):
158 |     """Transform a testid according to the rules below and extract the host name:
159 |     - staxmlresp or statxt -> ignore (return None)
160 |     - staxml -> station
161 |     - avail -> availability
162 |     - 1minAM or 1minPM -> dataselect
163 |     - remove "1min" and "1day" from the result
164 |     - replace "resif" with "eposfr"
165 |     - remove "text"
166 |     - extract the host name (second part) and remove it from the testid"""
167 |     # check longer patterns first to avoid partial matches
168 |     if testid.startswith("staxmlresp") or testid.startswith("statxt"):
169 |         return None, None
170 |     elif testid.startswith('staxml'):
171 |         result = "station" + testid[6:]  # replace the first 6 chars
172 |     elif testid.startswith("avail"):
173 |         result = "availability" + testid[5:]
174 |     elif testid.startswith("1minAM"):
175 |         result = "dataselect" + testid[6:]
176 |     elif testid.startswith("1minPM"):
177 |         result = "dataselect" + testid[6:]
178 |     else:
179 |         result = testid
180 | 
181 |     # clean up patterns
182 |     result = result.replace("-1min-", "-")
183 |     result = result.replace("-1day-", "-")
184 |     result = result.replace("resif", "eposfr")
185 |     result = result.replace("-text-", "-")
186 |     if result.endswith("-text"):
187 |         result = result[:-5]
188 | 
189 |     # split and extract the host
190 |     parts = result.split('-')
191 |     if len(parts) >= 3:
192 |         service = parts[0]
193 |         host = parts[1]
194 |         streams = '-'.join(parts[2:])
195 |         final_testid = f"{service}.{streams.replace('-', '.')}"
196 |         return final_testid, host
197 |     elif len(parts) >= 2:
198 |         # fallback for cases with only 2 parts
199 |         host = parts[1]
200 |         final_testid = parts[0]
201 |         return final_testid, host
202 | 
203 |     return result.replace('-', '.'), None
204 | 
205 | def filter_testid_by_host(eida_node, zabbix_hosts):
206 |     """Filter a testid based on Zabbix hosts; keep it only if eida_node matches a Zabbix host."""
207 |     if eida_node and eida_node.lower() in zabbix_hosts:
208 |         return True
209 |     return False
210 | 
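# Worked examples (testids taken from pc_tests_results.csv above):
#   transform_testid_and_extract_host("avail-1day-text-bgrmedsed-9streams")
#       -> ("availability.9streams", "bgrmedsed")
#   transform_testid_and_extract_host("1minAM-resifmedsed-320streams")
#       -> ("dataselect.320streams", "eposfrmedsed")   # "resif" rewritten to "eposfr"
#   transform_testid_and_extract_host("statxt-1min-bgrmedsed-9streams")
#       -> (None, None)   # statxt/staxmlresp tests are ignored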
211 | def transform_test_results(input_file, output_file):
212 |     """Transform a test results json file: new field names, durations converted to ms."""
213 |     try:
214 |         # get zabbix hosts
215 |         print("connecting to zabbix server")
216 |         zabbix_hosts = get_zabbix_hosts()
217 | 
218 |         if not zabbix_hosts:
219 |             print("failed to get hosts from zabbix, proceeding without host filtering")
220 |         else:
221 |             print(f"retrieved {len(zabbix_hosts)} hosts from zabbix")
222 | 
223 |         # read the input json file
224 |         with open(input_file, 'r', encoding='utf-8') as f:
225 |             data = json.load(f)
226 | 
227 |         print(f"loaded {len(data)} records from {input_file}")
228 | 
229 |         # transform test results
230 |         transformed_data = []
231 |         filtered_count = 0
232 |         ignored_count = 0
233 |         filtered_items = []
234 |         ignored_items = []
235 | 
236 |         for item in data:
237 |             # skip incomplete items
238 |             if not all(key in item for key in ["testid", "timestamp", "requestduration", "returncode", "responselength"]):
239 |                 continue
240 | 
241 |             # transform the testid and extract the host
242 |             new_testid, eida_node = transform_testid_and_extract_host(item["testid"])
243 | 
244 |             # skip ignored items (statxt, staxmlresp, ...)
245 |             if new_testid is None:
246 |                 ignored_count += 1
247 |                 ignored_items.append({
248 |                     "original_testid": item["testid"],
249 |                     "reason": "ignored rule (statxt/staxmlresp)"
250 |                 })
251 |                 continue
252 | 
253 |             # filter by zabbix host if available
254 |             if zabbix_hosts and eida_node:
255 |                 if not filter_testid_by_host(eida_node, zabbix_hosts):
256 |                     filtered_count += 1
257 |                     filtered_items.append({
258 |                         "original_testid": item["testid"],
259 |                         "extracted_host": eida_node,
260 |                         "reason": "host not found in zabbix"
261 |                     })
262 |                     continue
263 | 
264 |             response_time = parse_duration_to_ms(item["requestduration"])
265 |             status_code = int(item["returncode"])  # convert to int
266 |             content_size = int(item["responselength"])
267 |             transfer_rate = calculate_transfer_rate(item["responselength"], item["requestduration"])
268 | 
269 |             transformed_item = {
270 |                 "eida_node": eida_node.upper() if eida_node else None,
271 |                 "timestamp": item["timestamp"],
272 |                 f"{new_testid}.response_time_ms": response_time,
273 |                 f"{new_testid}.status_code": status_code,
274 |                 f"{new_testid}.content_size_bytes": content_size,
275 |                 f"{new_testid}.transfer_rate": transfer_rate
276 |             }
277 |             transformed_data.append(transformed_item)
278 | 
279 |         # display filtered and ignored items
280 |         if filtered_items:
281 |             print(f"\nfiltered out items ({len(filtered_items)}):")
282 |             for item in filtered_items:
283 |                 print(f"  - {item['original_testid']} (host: {item['extracted_host']}) - {item['reason']}")
284 | 
285 |         if ignored_items:
286 |             print(f"\nignored items ({len(ignored_items)}):")
287 |             for item in ignored_items:
288 |                 print(f"  - {item['original_testid']} - {item['reason']}")
289 | 
290 |         # write the transformed data to the output file
291 |         with open(output_file, 'w', encoding='utf-8') as f:
292 |             json.dump(transformed_data, f, indent=2, ensure_ascii=False)
293 | 
294 |         print("\ntransformation completed successfully")
295 |         print(f"  input file: {input_file}")
296 |         print(f"  output file: {output_file}")
297 |         print(f"  records processed: {len(transformed_data)}")
298 |         print(f"  records filtered: {filtered_count}")
299 |         print(f"  records ignored: {ignored_count}")
300 | 
301 |         return True
302 | 
303 |     except FileNotFoundError:
304 |         print(f"error: input file '{input_file}' not found")
305 |         return False
306 |     except json.JSONDecodeError as e:
307 |         print(f"error: invalid json in input file - {e}")
308 |         return False
309 |     except Exception as e:
310 |         print(f"error: {e}")
311 |         return False
312 | 
313 | if __name__ == "__main__":
314 |     parser = argparse.ArgumentParser(description='transform perfcheck results and send to zabbix')
315 |     parser.add_argument('--input', '-i', default='pc_tests_results_clean.json',
316 |                         help='input json file (default: pc_tests_results_clean.json)')
317 |     parser.add_argument('--output', '-o', default='pc_tests_results_transformed.json',
318 |                         help='output json file (default: pc_tests_results_transformed.json)')
319 |     parser.add_argument('--send_file', '-s',
320 |                         help='send a specific transformed file to zabbix')
321 |     parser.add_argument('--dry-run', action='store_true',
322 |                         help='dry run mode - show what would be done without actually doing it')
323 | 
324 |     args = parser.parse_args()
325 | 
326 |     if args.send_file:
327 |         # send a specific file to zabbix
328 |         print(f"sending file {args.send_file} to zabbix")
329 |         success = send_file_to_zbx(args.send_file, dry_run=args.dry_run)
330 |     else:
331 |         # transform the data only
332 |         print("transforming data")
333 |         success = transform_test_results(args.input, args.output)
334 
| 335 | if not success: 336 | exit(1) -------------------------------------------------------------------------------- /grafana_server/dashboards/EIDA_nodes_requirements.json: -------------------------------------------------------------------------------- 1 | { 2 | "annotations": { 3 | "list": [ 4 | { 5 | "builtIn": 1, 6 | "datasource": { 7 | "type": "grafana", 8 | "uid": "-- Grafana --" 9 | }, 10 | "enable": true, 11 | "hide": true, 12 | "iconColor": "rgba(0, 211, 255, 1)", 13 | "name": "Annotations & Alerts", 14 | "type": "dashboard" 15 | } 16 | ] 17 | }, 18 | "editable": true, 19 | "fiscalYearStartMonth": 0, 20 | "graphTooltip": 0, 21 | "id": 1, 22 | "links": [], 23 | "panels": [ 24 | { 25 | "collapsed": false, 26 | "gridPos": { 27 | "h": 1, 28 | "w": 24, 29 | "x": 0, 30 | "y": 0 31 | }, 32 | "id": 6, 33 | "panels": [], 34 | "repeat": "EIDA_NODE", 35 | "title": "CORS problems at $EIDA_NODE", 36 | "type": "row" 37 | }, 38 | { 39 | "datasource": { 40 | "type": "alexanderzobnin-zabbix-datasource", 41 | "uid": "ef0ms3r1nwoowb" 42 | }, 43 | "fieldConfig": { 44 | "defaults": {}, 45 | "overrides": [] 46 | }, 47 | "gridPos": { 48 | "h": 11, 49 | "w": 24, 50 | "x": 0, 51 | "y": 1 52 | }, 53 | "id": 2, 54 | "options": { 55 | "ackEventColor": "rgb(56, 219, 156)", 56 | "ackField": true, 57 | "ageField": true, 58 | "allowDangerousHTML": false, 59 | "customLastChangeFormat": false, 60 | "descriptionAtNewLine": false, 61 | "descriptionField": true, 62 | "fontSize": "100%", 63 | "highlightBackground": false, 64 | "highlightNewEvents": false, 65 | "highlightNewerThan": "1h", 66 | "hostField": true, 67 | "hostGroups": false, 68 | "hostProxy": false, 69 | "hostTechNameField": false, 70 | "lastChangeFormat": "", 71 | "layout": "table", 72 | "markAckEvents": false, 73 | "okEventColor": "rgb(56, 189, 113)", 74 | "opdataField": false, 75 | "pageSize": 1000, 76 | "problemTimeline": true, 77 | "resizedColumns": [ 78 | { 79 | "id": "name", 80 | "value": 414.29998779296875 81 | }, 82 | { 83 | "id": "host", 84 | "value": 105.14999389648438 85 | }, 86 | { 87 | "id": "lastchange", 88 | "value": 316 89 | }, 90 | { 91 | "id": "ack", 92 | "value": 186.40000915527344 93 | }, 94 | { 95 | "id": "tags", 96 | "value": 402.1500244140625 97 | } 98 | ], 99 | "severityField": true, 100 | "showDatasourceName": false, 101 | "showTags": true, 102 | "sortProblems": "lastchange", 103 | "statusField": true, 104 | "statusIcon": false, 105 | "triggerSeverity": [ 106 | { 107 | "color": "rgb(108, 108, 108)", 108 | "priority": 0, 109 | "severity": "Not classified", 110 | "show": true 111 | }, 112 | { 113 | "color": "rgb(120, 158, 183)", 114 | "priority": 1, 115 | "severity": "Information", 116 | "show": true 117 | }, 118 | { 119 | "color": "rgb(175, 180, 36)", 120 | "priority": 2, 121 | "severity": "Warning", 122 | "show": true 123 | }, 124 | { 125 | "color": "rgb(255, 137, 30)", 126 | "priority": 3, 127 | "severity": "Average", 128 | "show": true 129 | }, 130 | { 131 | "color": "rgb(255, 101, 72)", 132 | "priority": 4, 133 | "severity": "High", 134 | "show": true 135 | }, 136 | { 137 | "color": "rgb(215, 0, 0)", 138 | "priority": 5, 139 | "severity": "Disaster", 140 | "show": true 141 | } 142 | ] 143 | }, 144 | "pluginVersion": "6.0.2", 145 | "repeat": "WS", 146 | "repeatDirection": "h", 147 | "targets": [ 148 | { 149 | "application": { 150 | "filter": "" 151 | }, 152 | "countTriggersBy": "problems", 153 | "evaltype": "0", 154 | "functions": [], 155 | "group": { 156 | "filter": "$EIDA_NODE" 157 | }, 158 | "host": { 159 | "filter": 
"$EIDA_NODE" 160 | }, 161 | "item": { 162 | "filter": "" 163 | }, 164 | "itemTag": { 165 | "filter": "" 166 | }, 167 | "macro": { 168 | "filter": "" 169 | }, 170 | "options": { 171 | "acknowledged": 2, 172 | "count": true, 173 | "disableDataAlignment": false, 174 | "hostProxy": false, 175 | "hostsInMaintenance": false, 176 | "limit": 1001, 177 | "minSeverity": 0, 178 | "showDisabledItems": false, 179 | "skipEmptyValues": false, 180 | "sortProblems": "default", 181 | "useTimeRange": false, 182 | "useTrends": "default", 183 | "useZabbixValueMapping": false 184 | }, 185 | "proxy": { 186 | "filter": "" 187 | }, 188 | "queryType": "5", 189 | "refId": "A", 190 | "resultFormat": "time_series", 191 | "schema": 12, 192 | "showProblems": "problems", 193 | "table": { 194 | "skipEmptyValues": false 195 | }, 196 | "tags": { 197 | "filter": "Test:cors, Application:availability" 198 | }, 199 | "textFilter": "", 200 | "trigger": { 201 | "filter": "" 202 | } 203 | } 204 | ], 205 | "title": "CORS availability", 206 | "type": "alexanderzobnin-zabbix-triggers-panel" 207 | }, 208 | { 209 | "collapsed": false, 210 | "gridPos": { 211 | "h": 1, 212 | "w": 24, 213 | "x": 0, 214 | "y": 12 215 | }, 216 | "id": 7, 217 | "panels": [], 218 | "repeat": "EIDA_NODE", 219 | "title": "WADL at $EIDA_NODE", 220 | "type": "row" 221 | }, 222 | { 223 | "datasource": { 224 | "type": "alexanderzobnin-zabbix-datasource", 225 | "uid": "ef0ms3r1nwoowb" 226 | }, 227 | "fieldConfig": { 228 | "defaults": { 229 | "color": { 230 | "mode": "thresholds" 231 | }, 232 | "custom": { 233 | "axisPlacement": "auto", 234 | "fillOpacity": 70, 235 | "hideFrom": { 236 | "legend": false, 237 | "tooltip": false, 238 | "viz": false 239 | }, 240 | "insertNulls": false, 241 | "lineWidth": 0, 242 | "spanNulls": false 243 | }, 244 | "mappings": [ 245 | { 246 | "options": { 247 | "200": { 248 | "color": "green", 249 | "index": 0, 250 | "text": "OK" 251 | }, 252 | "201": { 253 | "color": "yellow", 254 | "index": 1 255 | }, 256 | "204": { 257 | "color": "yellow", 258 | "index": 2, 259 | "text": "No Content" 260 | }, 261 | "400": { 262 | "color": "dark-orange", 263 | "index": 3, 264 | "text": "Bad Request" 265 | }, 266 | "401": { 267 | "color": "dark-orange", 268 | "index": 4, 269 | "text": "Unauthorized" 270 | }, 271 | "403": { 272 | "color": "dark-orange", 273 | "index": 5, 274 | "text": "Forbidden" 275 | }, 276 | "404": { 277 | "color": "yellow", 278 | "index": 6, 279 | "text": "Not Found" 280 | }, 281 | "500": { 282 | "color": "dark-red", 283 | "index": 7, 284 | "text": "Internal Server Error" 285 | }, 286 | "502": { 287 | "color": "dark-red", 288 | "index": 8, 289 | "text": "Bad Gateway" 290 | }, 291 | "503": { 292 | "color": "dark-red", 293 | "index": 9, 294 | "text": "Service Unavailable" 295 | }, 296 | "null": { 297 | "color": "transparent", 298 | "index": 10 299 | } 300 | }, 301 | "type": "value" 302 | } 303 | ], 304 | "thresholds": { 305 | "mode": "absolute", 306 | "steps": [ 307 | { 308 | "color": "green", 309 | "value": 0 310 | } 311 | ] 312 | } 313 | }, 314 | "overrides": [] 315 | }, 316 | "gridPos": { 317 | "h": 13, 318 | "w": 24, 319 | "x": 0, 320 | "y": 13 321 | }, 322 | "id": 9, 323 | "options": { 324 | "alignValue": "left", 325 | "legend": { 326 | "displayMode": "list", 327 | "placement": "bottom", 328 | "showLegend": false 329 | }, 330 | "mergeValues": true, 331 | "rowHeight": 0.9, 332 | "showValue": "auto", 333 | "tooltip": { 334 | "hideZeros": false, 335 | "mode": "single", 336 | "sort": "none" 337 | } 338 | }, 339 | "pluginVersion": 
"12.1.1", 340 | "targets": [ 341 | { 342 | "application": { 343 | "filter": "" 344 | }, 345 | "countTriggersBy": "", 346 | "evaltype": "0", 347 | "functions": [ 348 | { 349 | "added": true, 350 | "def": { 351 | "category": "Alias", 352 | "defaultParams": [ 353 | "/(.*)/", 354 | "$1" 355 | ], 356 | "name": "replaceAlias", 357 | "params": [ 358 | { 359 | "name": "regexp", 360 | "type": "string" 361 | }, 362 | { 363 | "name": "newAlias", 364 | "type": "string" 365 | } 366 | ] 367 | }, 368 | "params": [ 369 | "/.*scenario \"(.*)\"./", 370 | "$1" 371 | ], 372 | "text": "replaceAlias(/(.*)/, $1)" 373 | } 374 | ], 375 | "group": { 376 | "filter": "$EIDA_NODE" 377 | }, 378 | "host": { 379 | "filter": "$EIDA_NODE" 380 | }, 381 | "item": { 382 | "filter": "Response code for step \".* application.wadl\" of scenario \"$WS\".*" 383 | }, 384 | "itemTag": { 385 | "filter": "webservice: $WS" 386 | }, 387 | "macro": { 388 | "filter": "" 389 | }, 390 | "options": { 391 | "count": false, 392 | "disableDataAlignment": true, 393 | "showDisabledItems": false, 394 | "skipEmptyValues": false, 395 | "useTrends": "default", 396 | "useZabbixValueMapping": false 397 | }, 398 | "proxy": { 399 | "filter": "" 400 | }, 401 | "queryType": "0", 402 | "refId": "A", 403 | "resultFormat": "time_series", 404 | "schema": 12, 405 | "table": { 406 | "skipEmptyValues": false 407 | }, 408 | "tags": { 409 | "filter": "" 410 | }, 411 | "textFilter": "", 412 | "trigger": { 413 | "filter": "" 414 | } 415 | } 416 | ], 417 | "title": "WADL files accessible", 418 | "transformations": [ 419 | { 420 | "id": "renameByRegex", 421 | "options": { 422 | "regex": "^(.*):.*$", 423 | "renamePattern": "$1" 424 | } 425 | }, 426 | { 427 | "id": "convertFieldType", 428 | "options": { 429 | "conversions": [ 430 | { 431 | "destinationType": "string", 432 | "targetField": "availability" 433 | }, 434 | { 435 | "destinationType": "string", 436 | "targetField": "dataselect" 437 | }, 438 | { 439 | "destinationType": "string", 440 | "targetField": "station" 441 | }, 442 | { 443 | "destinationType": "string", 444 | "targetField": "wfcatalog" 445 | } 446 | ], 447 | "fields": {} 448 | } 449 | } 450 | ], 451 | "type": "state-timeline" 452 | } 453 | ], 454 | "preload": false, 455 | "refresh": "1m", 456 | "schemaVersion": 41, 457 | "tags": [ 458 | "EIDA" 459 | ], 460 | "templating": { 461 | "list": [ 462 | { 463 | "allowCustomValue": false, 464 | "current": { 465 | "text": "BGS", 466 | "value": "BGS" 467 | }, 468 | "definition": "", 469 | "includeAll": false, 470 | "label": "EIDA NODE", 471 | "name": "EIDA_NODE", 472 | "options": [], 473 | "query": { 474 | "application": "", 475 | "group": "/^EIDA nodes$/", 476 | "host": "/.*/", 477 | "item": "", 478 | "itemTag": "", 479 | "queryType": "host" 480 | }, 481 | "refresh": 1, 482 | "regex": "", 483 | "sort": 1, 484 | "type": "query" 485 | }, 486 | { 487 | "allowCustomValue": false, 488 | "current": { 489 | "text": [ 490 | "dataselect", 491 | "availability", 492 | "station", 493 | "wfcatalog" 494 | ], 495 | "value": [ 496 | "dataselect", 497 | "availability", 498 | "station", 499 | "wfcatalog" 500 | ] 501 | }, 502 | "definition": "", 503 | "hide": 2, 504 | "includeAll": true, 505 | "label": "webservice", 506 | "multi": true, 507 | "name": "WS", 508 | "options": [], 509 | "query": { 510 | "application": "", 511 | "group": "/.*/", 512 | "host": "/.*/", 513 | "item": "", 514 | "itemTag": "/webservice: [^(routing.*)]/", 515 | "queryType": "itemTag" 516 | }, 517 | "refresh": 1, 518 | "regex": "/webservice: (.*)/", 519 | 
"type": "query" 520 | } 521 | ] 522 | }, 523 | "time": { 524 | "from": "now-2d", 525 | "to": "now" 526 | }, 527 | "timepicker": { 528 | "hidden": true, 529 | "nowDelay": "1m" 530 | }, 531 | "timezone": "browser", 532 | "title": "EIDA Nodes requirements", 533 | "uid": "eeq7mizn2y8zkb", 534 | "version": 10 535 | } --------------------------------------------------------------------------------