├── templates
│   └── .gitkeep
├── tests
│   ├── __init__.py
│   ├── Dockerfile
│   ├── test_common.py
│   ├── 1-nginx.json
│   ├── 1-nginx-marathon1.5.json
│   ├── marathon15_apps.json
│   ├── run-benchmark.rb
│   ├── test_marathon_lb_haproxy_options.py
│   ├── haproxy_stats.csv
│   ├── zdd_app_blue.json
│   ├── zdd_app_blue_marathon1.5.json
│   └── zdd_apps.json
├── testsAT
│   ├── .gitignore
│   ├── src
│   │   └── test
│   │       ├── resources
│   │       │   ├── META-INF
│   │       │   │   └── aop.xml
│   │       │   ├── scripts
│   │       │   │   ├── iptables.sh
│   │       │   │   ├── nginx-qa-invalid-certs.sh
│   │       │   │   ├── marathon-lb-invalid-certs.sh
│   │       │   │   ├── marathon-lb-restore-password.sh
│   │       │   │   ├── marathon-lb-restore-certs.sh
│   │       │   │   ├── marathon-lb-app-certs.sh
│   │       │   │   ├── nginx-qa-restore-certs.sh
│   │       │   │   ├── marathon-lb-invalid-password.sh
│   │       │   │   └── marathon-lb-manage-certs.sh
│   │       │   ├── schemas
│   │       │   │   ├── marathonlb_https_rules.txt
│   │       │   │   ├── marathonlb_http_rules.txt
│   │       │   │   ├── nginx-qa-config.json
│   │       │   │   └── nginx-qa-config_original.json
│   │       │   ├── features
│   │       │   │   └── functionalAT
│   │       │   │       ├── 030_Logs
│   │       │   │       │   ├── 02_QATM_2113_Log_Haproxy_Wrapper_Debug.feature
│   │       │   │       │   ├── 01_MARATHONLB_1388_CentralizedLogs.feature
│   │       │   │       │   └── 03_QATM_2113_Vault_Renewal_Token.feature
│   │       │   │       ├── 010_Installation
│   │       │   │       │   ├── 002_checkDeployment_IT.feature
│   │       │   │       │   └── 001_installationCCT_IT.feature
│   │       │   │       ├── 020_Certificates
│   │       │   │       │   ├── 04_QATM_2113_Certificates_MarathonLB_Service.feature
│   │       │   │       │   ├── QATM_1685_Invalid_Password.feature
│   │       │   │       │   ├── QATM_1685_Invalid_Certificates_IT.feature
│   │       │   │       │   ├── 02_MARATHONLB_1386_ClientCertificate.feature
│   │       │   │       │   ├── 01_MARATHONLB_1386_AppCertificate.feature
│   │       │   │       │   └── 03_QATM_2113_Check_Invalid_AppCertificate.feature
│   │       │   │       ├── 070_bugs
│   │       │   │       │   └── 01_EOS_3591_changeTimezone.feature
│   │       │   │       ├── 099_Uninstall
│   │       │   │       │   └── CCT_Uninstall_IT.feature
│   │       │   │       ├── 040_IpTables
│   │       │   │       │   └── QATM_1685_Check_Iptables.feature
│   │       │   │       ├── 060_monitoring
│   │       │   │       │   └── 01_EOS_3139_monitoring_IT.feature
│   │       │   │       └── 050_check_haproxy_host_path
│   │       │   │           └── 01_EOS_2920_check_multiple_deployments.feature
│   │       │   └── log4j2.xml
│   │       └── java
│   │           └── com
│   │               └── stratio
│   │                   ├── marathonlbsec
│   │                   │   ├── functionalAT
│   │                   │   │   ├── Uninstall_CCT_IT.java
│   │                   │   │   ├── QATM_1388_Logs_IT.java
│   │                   │   │   ├── Check_Deployment_IT.java
│   │                   │   │   ├── Installation_CCT_IT.java
│   │                   │   │   ├── EOS_3139_Monitoring_IT.java
│   │                   │   │   ├── EOS_3591_ChangeTimezone_IT.java
│   │                   │   │   ├── QATM_1685_Check_Iptables_IT.java
│   │                   │   │   ├── QATM_2113_Vault_Renewal_Token_IT.java
│   │                   │   │   ├── QATM_1685_Invalid_Password_IT.java
│   │                   │   │   ├── QATM_2113_Log_Haproxy_Wrapper_Debug_IT.java
│   │                   │   │   ├── QATM_1685_Invalid_Certificates_IT.java
│   │                   │   │   ├── QATM_2113_CheckInvalidAppCertificates_IT.java
│   │                   │   │   ├── QATM_2113_Certificates_MarathonLB_Service_IT.java
│   │                   │   │   ├── QATM_1386_Certificates_IT.java
│   │                   │   │   └── Nightly_IT.java
│   │                   │   └── specs
│   │                   │       ├── ThenSpec.java
│   │                   │       ├── HookSpec.java
│   │                   │       ├── BaseSpec.java
│   │                   │       ├── Common.java
│   │                   │       ├── GivenSpec.java
│   │                   │       └── WhenSpec.java
│   │                   └── tests
│   │                       └── utils
│   │                           └── BaseTest.java
│   ├── pom.xml
│   └── README.md
├── VERSION
├── bluegreen_deploy.py
├── .coveragerc
├── syslogd
│   ├── syslog.conf
│   └── run
├── requirements-dev.txt
├── requirements.txt
├── .dockerignore
├── MARATHON-LB-VERSION
├── Makefile
├── .gitignore
├── reload_haproxy.sh
├── service
│   └── haproxy
│       └── run
├── bin
│   ├── package.sh
│   └── change-version.sh
├── scripts
│   ├── build-docs.sh
│   └── install-git-hooks.sh
├── hooks
│   └── pre-commit
├── rsyslog
│   ├── run
│   └── rsyslog.conf
├── VERSION_HISTORY.md
├── getpids.lua
├── health.lua
├── lrucache.py
├── token_renewal.py
├── getconfig.lua
├── signalmlb.lua
├── getmaps.lua
├── zdd_exceptions.py
├── CHANGELOG.md
├── Dockerfile
├── Jenkinsfile
├── kms_utils.py
├── common.py
├── run
├── LICENSE
└── utils.py
/templates/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/testsAT/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
--------------------------------------------------------------------------------
/VERSION:
--------------------------------------------------------------------------------
1 | 0.8.0-SNAPSHOT
2 |
--------------------------------------------------------------------------------
/bluegreen_deploy.py:
--------------------------------------------------------------------------------
1 | zdd.py
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = .
3 | omit = tests/*
4 |
--------------------------------------------------------------------------------
/syslogd/syslog.conf:
--------------------------------------------------------------------------------
1 | *.*;auth,authpriv.none -/var/log/container_stdout
2 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | -r requirements.txt
2 | coverage
3 | flake8
4 | mock
5 | nose
6 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | cryptography
2 | PyJWT==1.7.1
3 | pycurl
4 | python-dateutil
5 | requests
6 | six
7 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .git
2 | build.bash
3 | tests
4 | requirements-dev.txt
5 | hooks
6 | scripts
7 | .*
8 | Dockerfile
9 |
--------------------------------------------------------------------------------
/MARATHON-LB-VERSION:
--------------------------------------------------------------------------------
1 | ###################
2 | ### Marathon-LB ###
3 | ###################
4 |
5 | MARATHON_LB_PKG_VERSION=v1.11.3
6 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | all: package
2 | change-version:
3 | bin/change-version.sh $(version)
4 | package:
5 | bin/package.sh $(version)
6 |
7 |
--------------------------------------------------------------------------------
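
Both targets simply delegate to the scripts under bin/, passing an optional version argument; bin/package.sh falls back to the contents of VERSION when none is given. A minimal usage sketch (the explicit version value below is illustrative):

    # Build the Docker image tagged with the contents of VERSION
    make package

    # Build and tag with an explicit version instead
    make package version=0.8.0-SNAPSHOT

    # Rewrite the VERSION file
    make change-version version=0.8.0-SNAPSHOT
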
/testsAT/src/test/resources/META-INF/aop.xml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Stratio/marathon-lb-sec/HEAD/testsAT/src/test/resources/META-INF/aop.xml
--------------------------------------------------------------------------------
/testsAT/src/test/resources/scripts/iptables.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 | for run in {1..300}
3 | do
4 | curl $1:9090/_mlb_signal/usr1
5 | sleep 5
6 | done
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | templates/
3 | __pycache__
4 | *.pyc
5 | .env
6 | .cache
7 |
8 | .coverage
9 | .coverage.*
10 | *,cover
11 | coverage.xml
12 | htmlcov/
13 |
14 | *.iml
15 | testsAT/target
16 |
--------------------------------------------------------------------------------
/reload_haproxy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | socat /var/run/haproxy/socket - <<< "show servers state" > /var/state/haproxy/global
4 |
5 | # "sv reload ${HAPROXY_SERVICE}" will be added here by /marathon-lb/run:
6 |
--------------------------------------------------------------------------------
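
As the comment notes, the actual reload command is appended by /marathon-lb/run at container start. The completed script would look roughly like this; it is a sketch only, assuming the runit service name matches the service/haproxy directory:

    #!/bin/bash

    # Preserve the current server state so the reloaded process can resume it seamlessly
    socat /var/run/haproxy/socket - <<< "show servers state" > /var/state/haproxy/global

    # Line appended by /marathon-lb/run:
    sv reload haproxy
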
/service/haproxy/run:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | exec 2>&1
3 | WHICH_HAPROXY=$(which haproxy)
4 |
5 | mkdir -p /var/state/haproxy
6 | mkdir -p /var/run/haproxy
7 |
8 | exec $WHICH_HAPROXY -W -f /marathon-lb/haproxy.cfg -x /var/run/haproxy/socket
--------------------------------------------------------------------------------
/bin/package.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 | BASEDIR=$( cd "$(dirname "$0")" ; pwd -P )/..
3 |
4 | cd $BASEDIR
5 |
6 | if [[ -z "$1" ]]; then
7 | VERSION=$(cat $BASEDIR/VERSION)
8 | else
9 | VERSION=$1
10 | fi
11 |
12 | docker build . -t "stratio/marathon-lb-sec:$VERSION"
13 |
--------------------------------------------------------------------------------
/bin/change-version.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 | BASEDIR=$( cd "$(dirname "$0")" ; pwd -P )/..
3 |
4 | cd $BASEDIR
5 |
6 | if [[ -z "$1" ]]; then
7 | VERSION=$(cat $BASEDIR/VERSION)
8 | else
9 | VERSION=$1
10 | fi
11 |
12 | echo "Modifying marathon-lb-sec version to: $VERSION"
13 | echo $VERSION > VERSION
--------------------------------------------------------------------------------
/syslogd/run:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | exec 2>&1
3 |
4 | # A hack to redirect syslog to stdout.
5 | # This is necessary because haproxy only logs to syslog.
6 | ln -sf /proc/$$/fd/1 /var/log/container_stdout
7 |
8 | # Start syslog in foreground
9 | /usr/sbin/syslogd -f "/marathon-lb/syslogd/syslog.conf" -n
10 |
--------------------------------------------------------------------------------
/scripts/build-docs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -o errexit -o nounset -o pipefail
3 |
4 | SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )";
5 | REPO_DIR="$SCRIPTS_DIR/..";
6 | META_DIR="$REPO_DIR/repo/meta"
7 |
8 | echo "Building docs...";
9 | $REPO_DIR/"marathon_lb.py" --longhelp > $REPO_DIR/Longhelp.md;
10 | echo "OK";
11 |
--------------------------------------------------------------------------------
/hooks/pre-commit:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -o errexit -o nounset -o pipefail
3 |
4 | echo "RUNNING PRE-COMMIT";
5 |
6 | GIT_HOOKS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )";
7 | REPO_DIR=$GIT_HOOKS_DIR/../..
8 | SCRIPTS_DIR=$REPO_DIR/scripts
9 |
10 | $SCRIPTS_DIR/build-docs.sh
11 |
12 | # Fail if there are unstaged changes
13 | git diff --exit-code
14 |
--------------------------------------------------------------------------------
/scripts/install-git-hooks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -o errexit -o nounset -o pipefail
3 |
4 | SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )";
5 | REPO_DIR=$SCRIPTS_DIR/..
6 | HOOKS_DIR=$REPO_DIR/hooks
7 | GIT_HOOKS_DIR=$REPO_DIR/.git/hooks
8 |
9 | echo "Installing git hooks...";
10 |
11 | for file in $(ls $HOOKS_DIR); do
12 | echo "Copying $file";
13 | cp "$HOOKS_DIR/$file" $GIT_HOOKS_DIR/;
14 | chmod +x "$GIT_HOOKS_DIR/$file";
15 | done
16 |
17 | echo "OK";
18 |
--------------------------------------------------------------------------------
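
Together, these scripts wire the documentation build into git: install-git-hooks.sh copies everything in hooks/ into .git/hooks, and the pre-commit hook then regenerates Longhelp.md and aborts the commit if that leaves unstaged changes. From a fresh clone the workflow looks like this:

    # One-time setup
    ./scripts/install-git-hooks.sh

    # Every commit now re-runs scripts/build-docs.sh; if Longhelp.md changed,
    # `git diff --exit-code` fails the hook, so stage the regenerated file and retry:
    git add Longhelp.md
    git commit
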
/rsyslog/run:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | exec 2>&1
3 |
4 | source /usr/sbin/b-log.sh
5 | B_LOG --stdout true
6 | DOCKER_LOG_LEVEL=${DOCKER_LOG_LEVEL:-INFO}
7 | eval LOG_LEVEL_${DOCKER_LOG_LEVEL}
8 |
9 |
10 | # A hack to redirect syslog to stdout.
11 | # This is necessary because haproxy only logs to syslog.
12 | ln -sf /proc/$$/fd/1 /var/log/container_stdout
13 |
14 | cp /marathon-lb/rsyslog/rsyslog.conf /etc/rsyslog.conf
15 |
16 | # Start rsyslog service
17 | INFO $(service rsyslog start)
18 |
19 | while true; do sleep 0.5; done
--------------------------------------------------------------------------------
/tests/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM debian:jessie
2 |
3 | COPY . /marathon-lb
4 |
5 | RUN apt-get update && apt-get install -y ruby apache2 vim curl ruby-dev build-essential \
6 | && echo "deb http://debian.datastax.com/community stable main" | tee -a /etc/apt/sources.list.d/cassandra.sources.list \
7 | && curl -L http://debian.datastax.com/debian/repo_key | apt-key add - \
8 | && apt-get update && apt-get install -y cassandra \
9 | && gem install --no-ri --no-rdoc cassandra-driver \
10 | && apt-get clean && rm -rf /var/lib/apt/lists/*
11 |
12 | WORKDIR /marathon-lb
13 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/schemas/marathonlb_https_rules.txt:
--------------------------------------------------------------------------------
1 | frontend marathon_https_in
2 | bind *:443 ssl crt /marathon-lb/vault_certs/
3 | mode http
4 | acl path_nginx-qa-testqa3_80 path_beg testqa3
5 | use_backend nginx-qa-testqa3_80 if { ssl_fc_sni nginx-qa.labs.stratio.com } path_nginx-qa-testqa3_80
6 | acl path_nginx-qa-testqa2_80 path_beg testqa2
7 | use_backend nginx-qa-testqa2_80 if { ssl_fc_sni nginx-qa.labs.stratio.com } path_nginx-qa-testqa2_80
8 | acl path_nginx-qa-testqa1_80 path_beg testqa1
9 | use_backend nginx-qa-testqa1_80 if { ssl_fc_sni nginx-qa.labs.stratio.com } path_nginx-qa-testqa1_80
10 | use_backend %[ssl_fc_sni,lower,map(/marathon-lb/domain2backend.map)]
11 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/030_Logs/02_QATM_2113_Log_Haproxy_Wrapper_Debug.feature:
--------------------------------------------------------------------------------
1 | @rest
2 | @mandatory(DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD)
3 | Feature:[QATM-2113] Haproxy Wrapper logging debug
4 |
5 | Scenario:[01] Check marathon-lb logs
6 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
7 | And I run 'dcos task | grep marathon.*lb.* | tail -1 | awk '{print $5}'' in the ssh connection and save the value in environment variable 'TaskID'
8 | When in less than '300' seconds, checking each '10' seconds, the command output 'dcos task log --lines 10000000 !{TaskID} 2>/dev/null | grep -e 'DEBUG.*haproxy_wrapper.*' | wc -l' contains '0'
--------------------------------------------------------------------------------
/tests/test_common.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from mock import Mock
4 |
5 | import common
6 |
7 |
8 | class TestCommon(unittest.TestCase):
9 |
10 | def test_setup_logging_log_level(self):
11 | logger = Mock()
12 | common.setup_logging(logger, '/dev/null',
13 | '%(name)s: %(message)s', 'info')
14 |
15 | logger.setLevel.assert_called_with(20)
16 |
17 | def test_setup_logging_invalid_log_level(self):
18 | logger = Mock()
19 | with self.assertRaises(Exception) as context:
20 | common.setup_logging(logger, '/dev/null',
21 | '%(name)s: %(message)s', 'banana')
22 |
23 | assert str(context.exception) == 'Invalid log level: BANANA'
24 |
--------------------------------------------------------------------------------
/VERSION_HISTORY.md:
--------------------------------------------------------------------------------
1 | # Version History
2 |
3 |
4 | #### 0.7.0-62c453c (Built: June 01, 2020 | Released: June 19, 2020)
5 |
6 | #### 0.6.2-e7c6599 (Built: April 14, 2020 | Released: April 14, 2020)
7 |
8 | #### 0.6.1-3903506 (Built: March 20, 2020 | Released: March 20, 2020)
9 |
10 | #### 0.6.0-458dce6 (Built: January 14, 2020 | Released: January 27, 2020)
11 |
12 | #### 0.5.0-96093b8 (Built: October 03, 2019 | Released: November 18, 2019)
13 |
14 | #### 0.4.1-c930090 (Built: September 12, 2019 | Released: September 17, 2019)
15 |
16 | #### 0.4.0-aa77ef6 (Built: July 02, 2019 | Released: July 04, 2019)
17 |
18 | #### 0.3.1 (March 06, 2018)
19 |
20 | #### 0.3.0 (February 20, 2018)
21 |
22 | #### 0.2.0 (December 19, 2017)
23 |
24 | #### 0.1.0 (November 22, 2017)
--------------------------------------------------------------------------------
/getpids.lua:
--------------------------------------------------------------------------------
1 | -- A simple Lua module for HAProxy which returns
2 | -- a list of all the current HAProxy PIDs by using
3 | -- the unix `pidof` command.
4 | -- :)
5 |
6 | function os.capture(cmd)
7 | local f = assert(io.popen(cmd, 'r'))
8 | local s = assert(f:read('*a'))
9 | f:close()
10 | s = string.gsub(s, '^%s+', '')
11 | s = string.gsub(s, '%s+$', '')
12 | s = string.gsub(s, '[\n\r]+', ' ')
13 | return s
14 | end
15 |
16 | core.register_service("getpids", "http", function(applet)
17 | local response = os.capture("pidof haproxy", false)
18 | applet:set_status(200)
19 | applet:add_header("content-length", string.len(response))
20 | applet:add_header("content-type", "text/plain")
21 | applet:start_response()
22 | applet:send(response)
23 | end)
24 |
--------------------------------------------------------------------------------
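
The module only registers the "getpids" Lua service; the HTTP path it answers on is defined in haproxy.cfg, which is not part of this dump. Upstream marathon-lb exposes it as /_haproxy_getpids, so a probe would look roughly like this (path assumed; the host and port 9090 match those used by the acceptance tests):

    curl -s http://marathon-lb.marathon.mesos:9090/_haproxy_getpids
    # -> space-separated HAProxy PIDs, e.g. "312 287"
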
/health.lua:
--------------------------------------------------------------------------------
1 | function check_token()
2 |
3 | local is_ok = os.execute("bash -c \"if [[ $(expr $(cat /marathon-lb/token-status) - $(date +\"%s\")) -lt 0 ]] ; then (exit 1) ; else (exit 0) ; fi > /dev/null 2>&1 \"")
4 |
5 | if is_ok==true then
6 | return 200
7 | else
8 | return 501
9 | end
10 | end
11 |
12 | core.register_service("health", "http", function(applet)
13 | token_status = check_token()
14 | if token_status == 200 then
15 | response = "OK"
16 | else
17 | response = "Something went wrong"
18 | end
19 | applet:set_status(token_status)
20 | applet:add_header("content-length", string.len(response))
21 | applet:add_header("content-type", "text/plain")
22 | applet:start_response()
23 | applet:send(response)
24 | end)
25 |
--------------------------------------------------------------------------------
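
check_token() compares the epoch timestamp stored in /marathon-lb/token-status with the current time, returning 200 with "OK" while the token is still valid and 501 otherwise. A hedged probe, assuming haproxy.cfg maps the "health" service to an HTTP path (the path below is an assumption, possibly the /_haproxy_health_check path used by the deployment checks elsewhere in this repo):

    curl -i http://marathon-lb.marathon.mesos:9090/_haproxy_health_check
    # 200 "OK" while token-status is in the future,
    # 501 "Something went wrong" once it has expired
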
/lrucache.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | A simple LRU cache based on the one described at:
5 | https://www.kunxi.org/blog/2014/05/lru-cache-in-python/
6 | """
7 | import collections
8 |
9 |
10 | class LRUCache:
11 | def __init__(self, capacity=100):
12 | self.capacity = capacity
13 | self.cache = collections.OrderedDict()
14 |
15 | def get(self, key, default):
16 | try:
17 | value = self.cache.pop(key)
18 | self.cache[key] = value
19 | return value
20 | except KeyError:
21 | return default
22 |
23 | def set(self, key, value):
24 | try:
25 | self.cache.pop(key)
26 | except KeyError:
27 | if len(self.cache) >= self.capacity:
28 | self.cache.popitem(last=False)
29 | self.cache[key] = value
30 |
--------------------------------------------------------------------------------
/token_renewal.py:
--------------------------------------------------------------------------------
1 | import common
2 | import kms_utils
3 | import threading
4 | import time
5 |
6 | logger = None
7 |
8 |
9 | class TokenRenewal(threading.Thread):
10 |
11 | def __init__(self):
12 | threading.Thread.__init__(self, daemon=True)
13 |
14 | global logger
15 | logger = common.marathon_lb_logger.getChild('token_renewal.py')
16 | self.period = 45
17 | logger.info('Starting token renewal thread')
18 |
19 | def run(self):
20 | while True:
21 | logger.info('Checking if token needs renewal')
22 |
23 | try:
24 | kms_utils.check_token_needs_renewal(False)
25 |
26 | except Exception as e:
27 | logger.error('Something went wrong when checking the token')
28 | logger.error(e)
29 |
30 | time.sleep(self.period)
31 |
32 |
33 |
--------------------------------------------------------------------------------
/tests/1-nginx.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "nginx",
3 | "container": {
4 | "type": "DOCKER",
5 | "docker": {
6 | "image": "brndnmtthws/nginx-echo-sleep",
7 | "network": "BRIDGE",
8 | "portMappings": [
9 | { "hostPort": 0, "containerPort": 8080, "servicePort": 10000 }
10 | ],
11 | "forcePullImage":true
12 | }
13 | },
14 | "instances": 5,
15 | "cpus": 0.1,
16 | "mem": 65,
17 | "healthChecks": [{
18 | "protocol": "HTTP",
19 | "path": "/",
20 | "portIndex": 0,
21 | "timeoutSeconds": 15,
22 | "gracePeriodSeconds": 15,
23 | "intervalSeconds": 3,
24 | "maxConsecutiveFailures": 10
25 | }],
26 | "labels":{
27 | "HAPROXY_DEPLOYMENT_GROUP":"nginx",
28 | "HAPROXY_DEPLOYMENT_ALT_PORT":"10001",
29 | "HAPROXY_GROUP":"external"
30 | },
31 | "acceptedResourceRoles":["*", "slave_public"]
32 | }
33 |
--------------------------------------------------------------------------------
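
The HAPROXY_DEPLOYMENT_GROUP and HAPROXY_DEPLOYMENT_ALT_PORT labels exist so this app can be cycled through a blue/green (zero-downtime) deployment driven by zdd.py, which bluegreen_deploy.py points to. Upstream marathon-lb documents an invocation along these lines; treat the exact flags and URLs as assumptions for this fork:

    ./zdd.py -j tests/1-nginx.json \
        -m http://master.mesos:8080 \
        -l http://marathon-lb.marathon.mesos:9090 \
        -f
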
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/Uninstall_CCT_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = { "src/test/resources/features/functionalAT/099_Uninstall/CCT_Uninstall_IT.feature" },plugin = "json:target/cucumber.json")
10 | public class Uninstall_CCT_IT extends BaseGTest {
11 |
12 | public Uninstall_CCT_IT() {
13 | }
14 |
15 | @Test(enabled = true, groups = {"purge_cct"}, dataProvider = "scenarios")
16 | public void installation(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
17 | runScenario(pickleWrapper, featureWrapper);
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/specs/ThenSpec.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2015 Stratio (http://stratio.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.stratio.marathonlbsec.specs;
17 |
18 | public class ThenSpec extends BaseSpec {
19 |
20 | public ThenSpec(Common spec) {
21 | this.commonspec = spec;
22 | }
23 | }
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/specs/HookSpec.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2015 Stratio (http://stratio.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.stratio.marathonlbsec.specs;
17 |
18 | public class HookSpec extends BaseSpec {
19 |
20 | public HookSpec(Common spec) {
21 | this.commonspec = spec;
22 | }
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_1388_Logs_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/030_Logs/01_MARATHONLB_1388_CentralizedLogs.feature",
11 | },plugin = "json:target/cucumber.json")
12 | public class QATM_1388_Logs_IT extends BaseGTest {
13 |
14 | public QATM_1388_Logs_IT() {
15 | }
16 |
17 | @Test(enabled = true, groups = {"centralizedlogs"}, dataProvider = "scenarios")
18 | public void QATM1388_Logs(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
19 | runScenario(pickleWrapper, featureWrapper);
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/Check_Deployment_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/010_Installation/002_checkDeployment_IT.feature"
11 | },plugin = "json:target/cucumber.json")
12 |
13 | public class Check_Deployment_IT extends BaseGTest {
14 |
15 | public Check_Deployment_IT() {
16 | }
17 |
18 | @Test(enabled = true, groups = {"check_deployment"}, dataProvider = "scenarios")
19 | public void checkDeployment(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
20 | runScenario(pickleWrapper, featureWrapper);
21 | }
22 | }
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/Installation_CCT_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/010_Installation/001_installationCCT_IT.feature"
11 | },plugin = "json:target/cucumber.json")
12 |
13 | public class Installation_CCT_IT extends BaseGTest {
14 |
15 | public Installation_CCT_IT() {
16 | }
17 |
18 | @Test(enabled = true, groups = {"installation_cct"}, dataProvider = "scenarios")
19 | public void AppWithSecurityES(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
20 | runScenario(pickleWrapper, featureWrapper);
21 | }
22 | }
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/specs/BaseSpec.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2015 Stratio (http://stratio.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.stratio.marathonlbsec.specs;
17 |
18 | public class BaseSpec {
19 |
20 | protected Common commonspec;
21 |
22 | public Common getCommonSpec() {
23 | return this.commonspec;
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/schemas/marathonlb_http_rules.txt:
--------------------------------------------------------------------------------
1 | frontend marathon_http_in
2 | bind *:80
3 | mode http
4 | acl host_nginx-qa_labs_stratio_com_nginx-qa-testqa3 hdr(host) -i nginx-qa.labs.stratio.com
5 | acl path_nginx-qa-testqa3_80 path_beg testqa3
6 | use_backend nginx-qa-testqa3_80 if host_nginx-qa_labs_stratio_com_nginx-qa-testqa3 path_nginx-qa-testqa3_80
7 | acl host_nginx-qa_labs_stratio_com_nginx-qa-testqa2 hdr(host) -i nginx-qa.labs.stratio.com
8 | acl path_nginx-qa-testqa2_80 path_beg testqa2
9 | use_backend nginx-qa-testqa2_80 if host_nginx-qa_labs_stratio_com_nginx-qa-testqa2 path_nginx-qa-testqa2_80
10 | acl host_nginx-qa_labs_stratio_com_nginx-qa-testqa1 hdr(host) -i nginx-qa.labs.stratio.com
11 | acl path_nginx-qa-testqa1_80 path_beg testqa1
12 | use_backend nginx-qa-testqa1_80 if host_nginx-qa_labs_stratio_com_nginx-qa-testqa1 path_nginx-qa-testqa1_80
13 | use_backend %[req.hdr(host),lower,regsub(:.*$,,),map(/marathon-lb/domain2backend.map)]
14 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/EOS_3139_Monitoring_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/060_monitoring/01_EOS_3139_monitoring_IT.feature"
11 | }, plugin = "json:target/cucumber.json")
12 | public class EOS_3139_Monitoring_IT extends BaseGTest {
13 |
14 | public EOS_3139_Monitoring_IT() {}
15 |
16 | @Test(enabled = true, groups = {"monitoring_MarathonLB"}, dataProvider = "scenarios")
17 | public void monitoringMarathonLB(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
18 | runScenario(pickleWrapper, featureWrapper);
19 | }
20 |
21 | }
22 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/EOS_3591_ChangeTimezone_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/030_Logs/01_MARATHONLB_1388_CentralizedLogs.feature",
11 | },plugin = "json:target/cucumber.json")
12 | public class EOS_3591_ChangeTimezone_IT extends BaseGTest {
13 |
14 | public EOS_3591_ChangeTimezone_IT() {}
15 |
16 | @Test(enabled = true, groups = {"changeTimezone"}, dataProvider = "scenarios")
17 | public void EOS3591_changeTimezone(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
18 | runScenario(pickleWrapper, featureWrapper);
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_1685_Check_Iptables_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/040_IpTables/QATM_1685_Check_Iptables.feature"
11 | },plugin = "json:target/cucumber.json")
12 | public class QATM_1685_Check_Iptables_IT extends BaseGTest {
13 |
14 | public QATM_1685_Check_Iptables_IT() {
15 | }
16 |
17 | @Test(enabled = true, groups = {"iptables"}, dataProvider = "scenarios")
18 | public void QATM_1685_Check_Iptables_IT(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
19 | runScenario(pickleWrapper, featureWrapper);
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_2113_Vault_Renewal_Token_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = { "src/test/resources/features/functionalAT/030_Logs/03_QATM_2113_Vault_Renewal_Token.feature" },plugin = "json:target/cucumber.json")
10 | public class QATM_2113_Vault_Renewal_Token_IT extends BaseGTest {
11 |
12 | public QATM_2113_Vault_Renewal_Token_IT() {
13 |
14 | }
15 |
16 | @Test(enabled = true, groups = {"vaultRenewalToken"}, dataProvider = "scenarios")
17 | public void QATM1386_Certificates(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
18 | runScenario(pickleWrapper, featureWrapper);
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_1685_Invalid_Password_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/020_Certificates/QATM_1685_Invalid_Password.feature"
11 | },plugin = "json:target/cucumber.json")
12 | public class QATM_1685_Invalid_Password_IT extends BaseGTest {
13 |
14 | public QATM_1685_Invalid_Password_IT() {
15 | }
16 |
17 | @Test(enabled = true, groups = {"invalid_password"}, dataProvider = "scenarios")
18 | public void QATM_1685_Invalid_Password(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
19 | runScenario(pickleWrapper, featureWrapper);
20 | }
21 | }
--------------------------------------------------------------------------------
/tests/1-nginx-marathon1.5.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "nginx",
3 | "container": {
4 | "type": "DOCKER",
5 | "docker": {
6 | "image": "brndnmtthws/nginx-echo-sleep",
7 | "forcePullImage":true
8 | },
9 | "portMappings": [
10 | { "hostPort": 0, "containerPort": 8080, "servicePort": 10000 }
11 | ]
12 | },
13 | "networks": [
14 | { "mode": "container/bridge" }
15 | ],
16 | "instances": 5,
17 | "cpus": 0.1,
18 | "mem": 65,
19 | "healthChecks": [{
20 | "protocol": "MESOS_HTTP",
21 | "path": "/",
22 | "portIndex": 0,
23 | "timeoutSeconds": 15,
24 | "gracePeriodSeconds": 15,
25 | "intervalSeconds": 3,
26 | "maxConsecutiveFailures": 10
27 | }],
28 | "labels":{
29 | "HAPROXY_DEPLOYMENT_GROUP":"nginx",
30 | "HAPROXY_DEPLOYMENT_ALT_PORT":"10001",
31 | "HAPROXY_GROUP":"external"
32 | },
33 | "acceptedResourceRoles":["*", "slave_public"]
34 | }
35 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_2113_Log_Haproxy_Wrapper_Debug_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = { "src/test/resources/features/functionalAT/030_Logs/02_QATM_2113_Log_Haproxy_Wrapper_Debug.feature" },plugin = "json:target/cucumber.json")
10 | public class QATM_2113_Log_Haproxy_Wrapper_Debug_IT extends BaseGTest {
11 |
12 | public QATM_2113_Log_Haproxy_Wrapper_Debug_IT() {
13 |
14 | }
15 |
16 | @Test(enabled = true, groups = {"haproxyWrapperDebug"}, dataProvider = "scenarios")
17 | public void QATM1386_Certificates(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
18 | runScenario(pickleWrapper, featureWrapper);
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/010_Installation/002_checkDeployment_IT.feature:
--------------------------------------------------------------------------------
1 | @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_PASSWORD)
3 | Feature: [QATM-1870] Check Marathon-LB deployment
4 |
5 | Scenario:[01] Check correct deployment
6 | # Check status in API
7 | Then in less than '1200' seconds, checking each '20' seconds, I check in CCT that the service 'marathonlb' is in 'running' status
8 | Then in less than '1200' seconds, checking each '20' seconds, I check in CCT that the service 'marathonlb' is in 'healthy' status
9 |
10 | Scenario:[02] Make sure service is ready
11 | Given I get the 'external' ip for service id '/marathonlb' for task name 'marathonlb' and save it in environment variable 'hostIP'
12 | Given I send requests to '!{hostIP}:9090'
13 | When I send a 'GET' request to '/_haproxy_health_check'
14 | Then the service response status must be '200'
15 | When I send a 'GET' request to '/_haproxy_getconfig'
16 | Then the service response status must be '200'
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_1685_Invalid_Certificates_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/020_Certificates/QATM_1685_Invalid_Certificates_IT.feature"
11 | },plugin = "json:target/cucumber.json")
12 | public class QATM_1685_Invalid_Certificates_IT extends BaseGTest {
13 |
14 | public QATM_1685_Invalid_Certificates_IT() {
15 | }
16 |
17 | @Test(enabled = true, groups = {"invalid_certificates"}, dataProvider = "scenarios")
18 | public void QATM_1685_Invalid_Certificates_IT(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
19 | runScenario(pickleWrapper, featureWrapper);
20 | }
21 | }
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_2113_CheckInvalidAppCertificates_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/020_Certificates/03_QATM_2113_Check_Invalid_AppCertificate.feature",
11 | },plugin = "json:target/cucumber.json")
12 | public class QATM_2113_CheckInvalidAppCertificates_IT extends BaseGTest {
13 |
14 | public QATM_2113_CheckInvalidAppCertificates_IT() {
15 |
16 | }
17 |
18 | @Test(enabled = true, groups = {"checkInvalidAppCerts"}, dataProvider = "scenarios")
19 | public void nightly(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
20 | runScenario(pickleWrapper, featureWrapper);
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/scripts/nginx-qa-invalid-certs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e # Exit in case of any error
4 |
5 | err_report() {
6 | echo "$2 -> Error on line $1 with $3"
7 | }
8 | trap 'err_report $LINENO ${BASH_SOURCE[$i]} ${BASH_COMMAND}' ERR
9 |
10 | VAULT_TOKEN=$(grep -Po '"root_token":\s*"(\d*?,|.*?[^\\]")' /stratio_volume/vault_response | awk -F":" '{print $2}' | sed -e 's/^\s*"//' -e 's/"$//')
11 | INTERNAL_DOMAIN=$(grep -Po '"internalDomain":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
12 | CONSUL_DATACENTER=$(grep -Po '"consulDatacenter":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
13 |
14 | curl -k -s -GET -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/userland/certificates/nginx-qa" | jq .data > /stratio_volume/nginx-qa-cert-backup.json
15 |
16 | curl -k -s -XDELETE -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/userland/certificates/nginx-qa"
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_2113_Certificates_MarathonLB_Service_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/020_Certificates/04_QATM_2113_Certificates_MarathonLB_Service.feature"
11 | },plugin = "json:target/cucumber.json")
12 | public class QATM_2113_Certificates_MarathonLB_Service_IT extends BaseGTest {
13 |
14 | public QATM_2113_Certificates_MarathonLB_Service_IT() {
15 |
16 | }
17 |
18 | @Test(enabled = true, groups = {"certsMarathonLBServ"}, dataProvider = "scenarios")
19 | public void nightly(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
20 | runScenario(pickleWrapper, featureWrapper);
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/getconfig.lua:
--------------------------------------------------------------------------------
1 | -- A simple Lua script which serves up the HAProxy
2 | -- config as it was at init time.
3 |
4 | function read_config_file(cmdline)
5 | local found = false
6 | local filename = ''
7 | for s in string.gmatch(cmdline, '%g+') do
8 | if s == '-f' then
9 | found = true
10 | elseif found then
11 | filename = s
12 | break
13 | end
14 | end
15 |
16 | local f = io.open(filename, "rb")
17 | local config = f:read("*all")
18 | f:close()
19 | return config
20 | end
21 |
22 | function load_config()
23 | local f = io.open('/proc/self/cmdline', "rb")
24 | local cmdline = f:read("*all")
25 | f:close()
26 | return read_config_file(cmdline)
27 | end
28 |
29 | core.register_init(function()
30 | haproxy_config = load_config()
31 | end)
32 |
33 | core.register_service("getconfig", "http", function(applet)
34 | applet:set_status(200)
35 | applet:add_header("content-length", string.len(haproxy_config))
36 | applet:add_header("content-type", "text/plain")
37 | applet:start_response()
38 | applet:send(haproxy_config)
39 | end)
40 |
--------------------------------------------------------------------------------
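
The script extracts the path passed to HAProxy with -f by parsing /proc/self/cmdline at init time and caches that file's contents, so the response reflects the config the running process was started with. The 002_checkDeployment_IT.feature test above probes it as /_haproxy_getconfig on port 9090, which presumably maps to this service:

    curl -s http://marathon-lb.marathon.mesos:9090/_haproxy_getconfig
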
/testsAT/src/test/resources/scripts/marathon-lb-invalid-certs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e # Exit in case of any error
4 |
5 | err_report() {
6 | echo "$2 -> Error on line $1 with $3"
7 | }
8 | trap 'err_report $LINENO ${BASH_SOURCE[$i]} ${BASH_COMMAND}' ERR
9 |
10 | VAULT_TOKEN=$(grep -Po '"root_token":\s*"(\d*?,|.*?[^\\]")' /stratio_volume/vault_response | awk -F":" '{print $2}' | sed -e 's/^\s*"//' -e 's/"$//')
11 | INTERNAL_DOMAIN=$(grep -Po '"internalDomain":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
12 | CONSUL_DATACENTER=$(grep -Po '"consulDatacenter":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
13 |
14 | curl -k -s -GET -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/userland/certificates/marathon-lb" | jq .data > /stratio_volume/marathon-lb-cert-backup.json
15 |
16 | curl -k -s -XDELETE -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/userland/certificates/marathon-lb"
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/020_Certificates/04_QATM_2113_Certificates_MarathonLB_Service.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_PASSWORD)
3 | Feature: [QATM-2113] Certificates MarathonLB service
4 |
5 | Scenario:[01] Check Vault path by default
6 | Given I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
7 | And I run 'sudo docker exec -t paas-bootstrap curl -X GET -fskL --tlsv1.2 -H "X-Vault-Token:!{VAULT_TOKEN}" "https://vault.service.!{EOS_INTERNAL_DOMAIN}:${EOS_VAULT_PORT:-8200}/v1/${VAULT_USERLAND_CERTIFICATE_BASE_PATH:-userland/certificates/}marathon-lb" | jq '.data."marathon-lb_crt"'' in the ssh connection with exit status '0'
8 | And I run 'sudo docker exec -t paas-bootstrap curl -X GET -fskL --tlsv1.2 -H "X-Vault-Token:!{VAULT_TOKEN}" "https://vault.service.!{EOS_INTERNAL_DOMAIN}:${EOS_VAULT_PORT:-8200}/v1/${VAULT_USERLAND_CERTIFICATE_BASE_PATH:-userland/certificates/}marathon-lb" | jq '.data."marathon-lb_key"'' in the ssh connection with exit status '0'
9 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/QATM_1386_Certificates_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | "src/test/resources/features/functionalAT/020_Certificates/01_MARATHONLB_1386_AppCertificate.feature",
11 | "src/test/resources/features/functionalAT/020_Certificates/02_MARATHONLB_1386_ClientCertificate.feature"
12 | },plugin = "json:target/cucumber.json")
13 | public class QATM_1386_Certificates_IT extends BaseGTest {
14 |
15 | public QATM_1386_Certificates_IT() {
16 | }
17 |
18 | @Test(enabled = true, groups = {"app_client_certificates"}, dataProvider = "scenarios")
19 | public void QATM1386_Certificates(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
20 | runScenario(pickleWrapper, featureWrapper);
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/scripts/marathon-lb-restore-password.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e # Exit in case of any error
4 |
5 | err_report() {
6 | echo "$2 -> Error on line $1 with $3"
7 | }
8 | trap 'err_report $LINENO ${BASH_SOURCE[$i]} ${BASH_COMMAND}' ERR
9 |
10 | VAULT_TOKEN=$(grep -Po '"root_token":\s*"(\d*?,|.*?[^\\]")' /stratio_volume/vault_response | awk -F":" '{print $2}' | sed -e 's/^\s*"//' -e 's/"$//')
11 | INTERNAL_DOMAIN=$(grep -Po '"internalDomain":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
12 | CONSUL_DATACENTER=$(grep -Po '"consulDatacenter":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
13 |
14 | curl -k -H "X-Vault-Token:${VAULT_TOKEN}" -H "Content-Type:application/json" -X POST -d @/stratio_volume/marathon-mesos-passw-backup.json https://vault.service.${INTERNAL_DOMAIN}:8200/v1/dcs/passwords/marathon/mesos
15 | curl -k -H "X-Vault-Token:${VAULT_TOKEN}" -H "Content-Type:application/json" -X POST -d @/stratio_volume/marathon-rest-passw-backup.json https://vault.service.${INTERNAL_DOMAIN}:8200/v1/dcs/passwords/marathon/rest
16 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/scripts/marathon-lb-restore-certs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e # Exit in case of any error
4 |
5 | err_report() {
6 | echo "$2 -> Error on line $1 with $3"
7 | }
8 | trap 'err_report $LINENO ${BASH_SOURCE[$i]} ${BASH_COMMAND}' ERR
9 |
10 |
11 | cat << EOF | tee -a /stratio_volume/certs_restore_marathonlb.list > /dev/null
12 | marathon-lb | "DNS:marathon-lb.marathon.mesos" | client-server | userland/certificates/marathon-lb
13 | nginx-qa | "DNS:nginx-qa.labs.stratio.com" | client-server | userland/certificates/nginx-qa
14 | EOF
15 |
16 | VAULT_TOKEN=$(grep -Po '"root_token":\s*"(\d*?,|.*?[^\\]")' /stratio_volume/vault_response | awk -F":" '{print $2}' | sed -e 's/^\s*"//' -e 's/"$//')
17 | INTERNAL_DOMAIN=$(grep -Po '"internalDomain":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
18 | CONSUL_DATACENTER=$(grep -Po '"consulDatacenter":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
19 |
20 | cd /stratio/*secret-utils/
21 | bash -e gencerts -l /stratio_volume/certs_restore_marathonlb.list -w -v vault.service.$INTERNAL_DOMAIN -o 8200 -t $VAULT_TOKEN -d $INTERNAL_DOMAIN -c $CONSUL_DATACENTER
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/070_bugs/01_EOS_3591_changeTimezone.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD,BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_PASSWORD,TIMEZONE)
3 | Feature: Check deployment with change timezone
4 |
5 | @ignore @manual
6 | Scenario:[01] Change timezone in file descriptor.json
7 | Given I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
8 | And I run 'sudo jq '. += {"timezone":"GMT+2"}' /stratio_volume/descriptor.json > /tmp/descriptor.json && mv -f /tmp/descriptor.json /stratio_volume/descriptor.json' in the ssh connection
9 |
10 | @ignore @manual
11 | @include(feature:../010_Installation/001_installationCCT_IT.feature,scenario:[01] Get schema and install Marathon-LB)
12 | Scenario:[02] Deploy marathonlb
13 | Given I wait '5' seconds
14 |
15 | @ignore @manual
16 | @include(feature:../010_Installation/002_checkDeployment_IT.feature,scenario:[01] Check correct deployment)
17 | @include(feature:../010_Installation/002_checkDeployment_IT.feature,scenario:[02] Make sure service is ready)
18 | Scenario:[03] Check status of marathon-lb
19 | Given I wait '5' seconds
--------------------------------------------------------------------------------
/testsAT/src/test/resources/scripts/marathon-lb-app-certs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e # Exit in case of any error
4 |
5 | err_report() {
6 | echo "$2 -> Error on line $1 with $3"
7 | }
8 | trap 'err_report $LINENO ${BASH_SOURCE[$i]} ${BASH_COMMAND}' ERR
9 |
10 | cat << EOF | tee -a /stratio_volume/certs_custom_app_marathonlb.list > /dev/null
11 | nginx-qa | "DNS:nginx-qa.labs.stratio.com" | client-server | userland/certificates/nginx-qa
12 | EOF
13 |
14 | VAULT_TOKEN=$(grep -Po '"root_token":\s*"(\d*?,|.*?[^\\]")' /stratio_volume/vault_response | awk -F":" '{print $2}' | sed -e 's/^\s*"//' -e 's/"$//')
15 | INTERNAL_DOMAIN=$(grep -Po '"internalDomain":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
16 | CONSUL_DATACENTER=$(grep -Po '"consulDatacenter":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
17 |
18 | if [ -f "/stratio_volume/certificates_additional_data" ]; then
19 | source /stratio_volume/certificates_additional_data
20 | fi
21 |
22 | cd /stratio/*secret-utils/
23 | bash -e gencerts -l /stratio_volume/certs_custom_app_marathonlb.list -w -v vault.service.$INTERNAL_DOMAIN -o 8200 -t $VAULT_TOKEN -d $INTERNAL_DOMAIN -c $CONSUL_DATACENTER -p ${pki_password:-stratio}
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/099_Uninstall/CCT_Uninstall_IT.feature:
--------------------------------------------------------------------------------
1 | @rest
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_IP,DCOS_TENANT,DCOS_PASSWORD)
3 | Feature:[QATM-1870] Uninstall CCT
4 |
5 | # Scenario:[01] Obtain info from bootstrap
6 | # Given I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
7 | # Then I obtain basic information from bootstrap
8 | #
9 | @include(feature:../010_Installation/001_installationCCT_IT.feature,scenario:[Setup][01] Prepare prerequisites)
10 | Scenario:[02] Uninstall MarathonLB
11 | Given I authenticate to DCOS cluster '${DCOS_IP}' using email '!{DCOS_USER}' with user '${REMOTE_USER}' and pem file '${PEM_FILE_PATH}' over SSH port '${EOS_NEW_SSH_PORT:-22}'
12 | Given I set sso token using host '!{EOS_ACCESS_POINT}' with user '!{DCOS_TENANT_USER}' and password '!{DCOS_TENANT_PASSWORD}' and tenant '!{CC_TENANT}'
13 | And I securely send requests to '!{EOS_ACCESS_POINT}:443'
14 | When I send a 'DELETE' request to '/service/deploy-api/deploy/uninstall?app=marathonlb'
15 | And in less than '200' seconds, checking each '20' seconds, I send a 'GET' request to '/service/deploy-api/deploy/status/all' so that the response does not contains 'marathonlb'
--------------------------------------------------------------------------------
/testsAT/src/test/resources/scripts/nginx-qa-restore-certs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e # Exit in case of any error
4 |
5 | err_report() {
6 | echo "$2 -> Error on line $1 with $3"
7 | }
8 | trap 'err_report $LINENO ${BASH_SOURCE[$i]} ${BASH_COMMAND}' ERR
9 |
10 |
11 | cat << EOF | tee -a /stratio_volume/certs_restore_nginx-qa.list > /dev/null
12 | nginx-qa | "DNS:nginx-qa.marathon.mesos" | client-server | userland/certificates/nginx-qa
13 | nginx-qa | "DNS:nginx-qa.labs.stratio.com" | client-server | userland/certificates/nginx-qa
14 | EOF
15 |
16 | VAULT_TOKEN=$(grep -Po '"root_token":\s*"(\d*?,|.*?[^\\]")' /stratio_volume/vault_response | awk -F":" '{print $2}' | sed -e 's/^\s*"//' -e 's/"$//')
17 | INTERNAL_DOMAIN=$(grep -Po '"internalDomain":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
18 | CONSUL_DATACENTER=$(grep -Po '"consulDatacenter":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
19 |
20 | if [ -f "/stratio_volume/certificates_additional_data" ]; then
21 | source /stratio_volume/certificates_additional_data
22 | fi
23 |
24 | cd /stratio/*secret-utils/
25 | bash -e gencerts -l /stratio_volume/certs_restore_nginx-qa.list -w -v vault.service.$INTERNAL_DOMAIN -o 8200 -t $VAULT_TOKEN -d $INTERNAL_DOMAIN -c $CONSUL_DATACENTER -p ${pki_password:-stratio}
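
The grep/awk/sed pipelines above each pull a single string field out of a JSON file. As a point of comparison, a minimal Python sketch of the same extraction (assuming the same /stratio_volume files and the top-level field names implied by the grep patterns) can lean on the json module instead:

    import json

    # Same inputs the script reads; field names taken from the grep patterns above.
    with open("/stratio_volume/vault_response") as f:
        vault_token = json.load(f)["root_token"]

    with open("/stratio_volume/descriptor.json") as f:
        descriptor = json.load(f)

    internal_domain = descriptor["internalDomain"]
    consul_datacenter = descriptor["consulDatacenter"]

This avoids the quoting subtleties of chaining grep -Po, awk and sed, at the cost of requiring Python in the container.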
--------------------------------------------------------------------------------
/testsAT/src/test/resources/scripts/marathon-lb-invalid-password.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e # Exit in case of any error
4 |
5 | err_report() {
6 | echo "$2 -> Error on line $1 with $3"
7 | }
8 | trap 'err_report $LINENO ${BASH_SOURCE[$i]} ${BASH_COMMAND}' ERR
9 |
10 | VAULT_TOKEN=$(grep -Po '"root_token":\s*"(\d*?,|.*?[^\\]")' /stratio_volume/vault_response | awk -F":" '{print $2}' | sed -e 's/^\s*"//' -e 's/"$//')
11 | INTERNAL_DOMAIN=$(grep -Po '"internalDomain":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
12 | CONSUL_DATACENTER=$(grep -Po '"consulDatacenter":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
13 |
14 | curl -k -s -GET -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/dcs/passwords/marathon/mesos" | jq .data > /stratio_volume/marathon-mesos-passw-backup.json
15 | curl -k -s -GET -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/dcs/passwords/marathon/rest" | jq .data > /stratio_volume/marathon-rest-passw-backup.json
16 |
17 | curl -k -s -XDELETE -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/dcs/passwords/marathon/mesos"
18 | curl -k -s -XDELETE -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/dcs/passwords/marathon/rest"
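
The two GET calls back up each secret's data block and the two DELETEs remove it, which is what makes the subsequent install fail. A minimal Python sketch of the same Vault API sequence, assuming the requests library is available and skipping TLS verification exactly as curl -k does (the address and token values are illustrative):

    import json
    import requests

    vault_addr = "https://vault.service.example.internal:8200"  # illustrative domain
    headers = {"X-Vault-Token": "s.example-root-token"}         # illustrative token

    backups = {
        "dcs/passwords/marathon/mesos": "/stratio_volume/marathon-mesos-passw-backup.json",
        "dcs/passwords/marathon/rest": "/stratio_volume/marathon-rest-passw-backup.json",
    }

    for secret_path, backup_file in backups.items():
        # Read the secret and keep only its "data" block, like `jq .data` above.
        resp = requests.get(f"{vault_addr}/v1/{secret_path}", headers=headers, verify=False)
        resp.raise_for_status()
        with open(backup_file, "w") as f:
            json.dump(resp.json()["data"], f)
        # Delete the secret so marathon-lb cannot retrieve the Marathon passwords.
        requests.delete(f"{vault_addr}/v1/{secret_path}", headers=headers, verify=False)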
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/040_IpTables/QATM_1685_Check_Iptables.feature:
--------------------------------------------------------------------------------
1 | @rest
2 | @mandatory(DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD,BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_IP,DCOS_TENANT,DCOS_PASSWORD)
3 | Feature: Check iptables on the node to avoid conflicts between Marathon-lb, Calico and Minuteman
4 |
5 | Scenario:[01] Obtain node where marathon-lb-sec is running
6 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
7 | When I run 'dcos task | grep marathonlb | awk '{print $2}'' in the ssh connection and save the value in environment variable 'publicHostIP'
8 |
9 | Scenario:[02] Check iptables for Marathon-lb, Calico and Minuteman
10 | Given I open a ssh connection to '!{publicHostIP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
11 | And I run 'iptables -L | tail -10' in the ssh connection and save the value in environment variable 'iptablesInicial'
12 | And I outbound copy 'src/test/resources/scripts/iptables.sh' through a ssh connection to '/tmp'
13 | And I run 'chmod +x /tmp/iptables.sh' in the ssh connection
14 | And I run '/tmp/iptables.sh !{publicHostIP}' in the ssh connection
15 | And I run 'iptables -L | tail -10' in the ssh connection and save the value in environment variable 'iptablesFinal'
16 | Then '!{iptablesFinal}' is '!{iptablesInicial}'
17 |
18 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/log4j2.xml:
--------------------------------------------------------------------------------
(log4j2 XML configuration; the markup was not preserved in this listing. The only recoverable setting is a logger level of INFO.)
--------------------------------------------------------------------------------
/signalmlb.lua:
--------------------------------------------------------------------------------
1 | -- A simple Lua module for HAProxy that sends signals to the marathon-lb process
2 |
3 | function run(cmd)
4 | local file = io.popen(cmd)
5 | local output = file:read('*a')
6 | local success, _, code = file:close()
7 | return output, success, code
8 | end
9 |
10 | function send_response(applet, code, response)
11 | applet:set_status(code)
12 | applet:add_header("content-length", string.len(response))
13 | applet:add_header("content-type", "text/plain")
14 | applet:start_response()
15 | applet:send(response)
16 | end
17 |
18 | core.register_service("signalmlbhup", "http", function(applet)
19 | local _, success, code = run("pkill -HUP -f '^python.*marathon_lb.py'")
20 | if not success then
21 | send_response(applet, 500, string.format(
22 | "Failed to send SIGHUP signal to marathon-lb (exit code %d). Is \z
23 | marathon-lb running in 'poll' mode?", code))
24 | return
25 | end
26 |
27 | send_response(applet, 200, "Sent SIGHUP signal to marathon-lb")
28 | end)
29 |
30 | core.register_service("signalmlbusr1", "http", function(applet)
31 | local _, success, code = run("pkill -USR1 -f '^python.*marathon_lb.py'")
32 | if not success then
33 | send_response(applet, 500, string.format(
34 | "Failed to send SIGUSR1 signal to marathon-lb (exit code %d). Is \z
35 | marathon-lb running in 'poll' mode?", code))
36 | return
37 | end
38 |
39 | send_response(applet, 200, "Sent SIGUSR1 signal to marathon-lb")
40 | end)
41 |
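
These two services only become reachable once the HAProxy configuration binds them to URLs. In the stock marathon-lb template they are exposed on the stats listener (port 9090) at /_mlb_signal/hup and /_mlb_signal/usr1; this sketch assumes the same bindings apply here, so adjust the paths if this repository's template differs. A minimal Python example of triggering a reload:

    import requests

    # Stats listener address is illustrative; signal paths as bound in the stock
    # marathon-lb HAProxy template (assumed unchanged in this repository).
    base = "http://marathon-lb.marathon.mesos:9090"

    resp = requests.post(f"{base}/_mlb_signal/hup")  # ask marathon-lb to reload (SIGHUP)
    print(resp.status_code, resp.text)               # 500 if no marathon-lb process was signalled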
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/specs/Common.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2015 Stratio (http://stratio.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.stratio.marathonlbsec.specs;
17 |
18 | import com.stratio.qa.specs.CommonG;
19 | import java.net.Socket;
20 | import java.nio.channels.ServerSocketChannel;
21 |
22 | public class Common extends CommonG {
23 | private ServerSocketChannel serverSocket;
24 | private Socket socket;
25 |
26 | public ServerSocketChannel getServerSocket() {
27 | return serverSocket;
28 | }
29 |
30 | public void setServerSocket(ServerSocketChannel serverSocket) {
31 | this.serverSocket = serverSocket;
32 | }
33 |
34 | public Socket getSocket() {
35 | return socket;
36 | }
37 |
38 | public void setSocket(Socket socket) {
39 | this.socket = socket;
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/010_Installation/001_installationCCT_IT.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_PASSWORD,DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD,UNIVERSE_VERSION)
3 | Feature: [QATM-1870] Marathon-LB installation
4 |
5 | Scenario:[01] Get schema and install Marathon-LB
6 | Given I set sso token using host '!{EOS_ACCESS_POINT}' with user '!{DCOS_USER}' and password '${DCOS_PASSWORD}' and tenant '!{DCOS_TENANT}'
7 | And I securely send requests to '!{EOS_ACCESS_POINT}:443'
8 | And I get schema from service 'marathonlb' with model '${MLB_FLAVOUR:-basic}' and version '${UNIVERSE_VERSION}' and save it in file 'marathonlb-basic.json'
9 | When I create file 'marathonlb-config.json' based on 'marathonlb-basic.json' as 'json' with:
10 | | $.general.resources.INSTANCES | REPLACE | ${MLB_INSTANCE:-1} | number |
11 | | $.general.resources.CPUS | REPLACE | ${MLB_CPUS:-1} | number |
12 | | $.general.resources.MEM | REPLACE | ${MLB_MEM:-1} | number |
13 | | $.general.resources.DISK | REPLACE | ${MLB_DISK:-1} | number |
14 | | $.environment.haproxy.rsyslog | REPLACE | ${MLB_FLAG_RSYSLOG:-true} | boolean |
15 | Then I install service 'marathon-lb' with model '${MLB_FLAVOUR:-basic}' and version '${UNIVERSE_VERSION}' and instance name 'marathonlb' in tenant '!{DCOS_TENANT}' using json 'marathonlb-config.json'
16 |
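
The data table above edits the downloaded schema with JSONPath REPLACE operations before the install request is sent. Stated as plain Python over the same file names (a sketch only; the nesting of general.resources and environment.haproxy is inferred from the JSONPath expressions, and the values are the feature's defaults):

    import json

    with open("marathonlb-basic.json") as f:
        config = json.load(f)

    # Equivalent of the JSONPath REPLACE rows, using the feature's default values.
    config["general"]["resources"].update({"INSTANCES": 1, "CPUS": 1, "MEM": 1, "DISK": 1})
    config["environment"]["haproxy"]["rsyslog"] = True

    with open("marathonlb-config.json", "w") as f:
        json.dump(config, f, indent=2)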
--------------------------------------------------------------------------------
/testsAT/src/test/resources/scripts/marathon-lb-manage-certs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e # Exit in case of any error
4 |
5 | err_report() {
6 | echo "$2 -> Error on line $1 with $3"
7 | }
8 | trap 'err_report $LINENO ${BASH_SOURCE[$i]} ${BASH_COMMAND}' ERR
9 |
10 |
11 | cat << EOF | tee -a /stratio_volume/certs_client_marathonlb.list > /dev/null
12 | marathon-lb | "DNS:nginx-qa.labs.stratio.com" | client-server | userland/certificates/marathon-lb
13 | EOF
14 |
15 | VAULT_TOKEN=$(grep -Po '"root_token":\s*"(\d*?,|.*?[^\\]")' /stratio_volume/vault_response | awk -F":" '{print $2}' | sed -e 's/^\s*"//' -e 's/"$//')
16 | INTERNAL_DOMAIN=$(grep -Po '"internalDomain":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
17 | CONSUL_DATACENTER=$(grep -Po '"consulDatacenter":\s"(\d*?,|.*?[^\\]")' /stratio_volume/descriptor.json | awk -F":" '{print $2}' | sed -e 's/^\s"//' -e 's/"$//')
18 |
19 | curl -k -s -GET -H "X-Vault-Token:${VAULT_TOKEN}" "https://vault.service.${INTERNAL_DOMAIN}:8200/v1/userland/certificates/marathon-lb" | jq .data > /stratio_volume/marathon-lb-cert-backup.json
20 |
21 | if [ -f "/stratio_volume/certificates_additional_data" ]; then
22 | source /stratio_volume/certificates_additional_data
23 | fi
24 |
25 | cd /stratio/*secret-utils/
26 | bash -e gencerts -l /stratio_volume/certs_client_marathonlb.list -w -v vault.service.$INTERNAL_DOMAIN -o 8200 -t $VAULT_TOKEN -d $INTERNAL_DOMAIN -c $CONSUL_DATACENTER -p ${pki_password:-stratio}
27 |
28 |
--------------------------------------------------------------------------------
/rsyslog/rsyslog.conf:
--------------------------------------------------------------------------------
1 | module(load="imuxsock") # provides support for local system logging
2 | module(load="imklog") # provides kernel logging support
3 |
4 | template(name="CentralizedLogs" type="list") {
5 | property(name="timereported" dateformat="year")
6 | constant(value="-")
7 | property(name="timereported" dateformat="month")
8 | constant(value="-")
9 | property(name="timereported" dateformat="day")
10 | constant(value="T")
11 | property(name="timereported" dateformat="hour")
12 | constant(value=":")
13 | property(name="timereported" dateformat="minute")
14 | constant(value=":")
15 | property(name="timereported" dateformat="second")
16 | constant(value=".")
17 | property(name="timereported" dateformat="subseconds" position.to="3")
18 | property(name="timereported" dateformat="tzoffsdirection")
19 | property(name="timereported" dateformat="tzoffshour")
20 | constant(value=":")
21 | property(name="timereported" dateformat="tzoffsmin")
22 | constant(value=" ")
23 | property(name="syslogpriority-text" caseconversion="upper")
24 | constant(value=" - 0 haproxy haproxy {\"@message\": \"")
25 | property(name="msg" droplastlf="on" controlcharacters="drop")
26 | constant(value="\"}")
27 | constant(value="\n")
28 | }
29 |
30 | #$ActionFileDefaultTemplate RSYSLOG_TraditionalFileFormat
31 |
32 | $FileOwner root
33 | $FileGroup adm
34 | $FileCreateMode 0640
35 | $DirCreateMode 0755
36 | $Umask 0022
37 |
38 | $WorkDirectory /var/spool/rsyslog
39 |
40 | $IncludeConfig /etc/rsyslog.d/*.conf
41 |
42 | local0.=info -/var/log/container_stdout;CentralizedLogs
43 | local1.notice -/var/log/container_stdout;CentralizedLogs
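
With this template, a HAProxy message logged to local0.info is rewritten into the single-line centralized format that the 030_Logs feature later greps for. An illustrative output line (timestamp and message invented) looks like:

    2020-06-01T12:34:56.789+00:00 INFO - 0 haproxy haproxy {"@message": "Connect from 10.8.0.5:43210 to 10.8.0.7:443 (https_frontend/HTTP)"}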
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/specs/GivenSpec.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2015 Stratio (http://stratio.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.stratio.marathonlbsec.specs;
17 |
18 | import com.stratio.qa.utils.ThreadProperty;
19 | import cucumber.api.java.en.Given;
20 |
21 | public class GivenSpec extends BaseSpec {
22 |
23 | public GivenSpec(Common spec) {
24 | this.commonspec = spec;
25 | }
26 |
27 | @Given("^I set variables to login in custom tenant$")
28 | public void setTenantVariables() throws Exception {
29 | String dcosUser = System.getProperty("DCOS_TENANT_USER") != null ? System.getProperty("DCOS_TENANT_USER") : ThreadProperty.get("DCOS_USER");
30 | String dcosPassword = System.getProperty("DCOS_TENANT_PASSWORD") != null ? System.getProperty("DCOS_TENANT_PASSWORD") : System.getProperty("DCOS_PASSWORD");
31 | ThreadProperty.set("DCOS_TENANT_USER", dcosUser);
32 | ThreadProperty.set("DCOS_TENANT_PASSWORD", dcosPassword);
33 | }
34 | }
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/tests/utils/BaseTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2015 Stratio (http://stratio.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.stratio.tests.utils;
17 |
18 | import com.stratio.qa.utils.BaseGTest;
19 | import com.stratio.qa.utils.ThreadProperty;
20 | import org.testng.ITestContext;
21 | import org.testng.annotations.*;
22 |
23 | import java.lang.reflect.Method;
24 |
25 | abstract public class BaseTest extends BaseGTest {
26 |
27 | protected String browser = "";
28 |
29 | @BeforeSuite(alwaysRun = true)
30 | public void beforeSuite(ITestContext context) {
31 | }
32 |
33 | @AfterSuite(alwaysRun = true)
34 | public void afterSuite(ITestContext context) {
35 | }
36 |
37 | @BeforeClass(alwaysRun = true)
38 | public void beforeClass(ITestContext context) {
39 | }
40 |
41 | @BeforeMethod(alwaysRun = true)
42 | public void beforeMethod(Method method) {
43 | ThreadProperty.set("browser", this.browser);
44 | }
45 |
46 | @AfterMethod(alwaysRun = true)
47 | public void afterMethod(Method method) {
48 | }
49 |
50 | @AfterClass()
51 | public void afterClass() {
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/getmaps.lua:
--------------------------------------------------------------------------------
1 | -- A simple Lua script which serves up the HAProxy
2 | -- vhost to backend map file.
3 | function check_file_exists(name)
4 | local f = io.open(name, "r")
5 | if f ~= nil then io.close(f) return true else return false end
6 | end
7 |
8 | function read_file(filepath)
9 | -- Read all of the given file, returning an empty string if the file doesn't
10 | -- exist.
11 | local content = ""
12 | if check_file_exists(filepath) then
13 | local f = io.open(filepath, "rb")
14 | content = f:read("*all")
15 | f:close()
16 | end
17 | return content
18 | end
19 |
20 | function detect_config_dir()
21 | -- Read the process's (HAProxy's) cmdline proc and parse the path to the
22 | -- config file so that we can determine the config directory.
23 | local f = io.open("/proc/self/cmdline", "rb")
24 | local cmdline = f:read("*all")
25 | f:close()
26 |
27 | local found = false
28 | local sep = package.config:sub(1, 1)
29 | for opt in string.gmatch(cmdline, "%g+") do
30 | if opt == "-f" then
31 | found = true
32 | elseif found then
33 | return opt:match("(.*"..sep..")")
34 | end
35 | end
36 | end
37 |
38 | function load_map(filename)
39 | local config_dir = detect_config_dir()
40 | return read_file(config_dir..filename)
41 | end
42 |
43 | function send_map(applet, map)
44 | applet:set_status(200)
45 | applet:add_header("content-length", string.len(map))
46 | applet:add_header("content-type", "text/plain")
47 | applet:start_response()
48 | applet:send(map)
49 | end
50 |
51 | core.register_service("getvhostmap", "http", function(applet)
52 | local haproxy_vhostmap = load_map("domain2backend.map")
53 | send_map(applet, haproxy_vhostmap)
54 | end)
55 |
56 | core.register_service("getappmap", "http", function(applet)
57 | local haproxy_appmap = load_map("app2backend.map")
58 | send_map(applet, haproxy_appmap)
59 | end)
60 |
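
In the stock marathon-lb HAProxy template these services are reachable on the stats listener (port 9090) as /_haproxy_getvhostmap and /_haproxy_getappmap; assuming the same bindings in this repository's template, a small Python sketch that fetches and parses the vhost map:

    import requests

    base = "http://marathon-lb.marathon.mesos:9090"  # illustrative stats address

    # The response is the raw HAProxy map file: one "key value" pair per line.
    vhost_map = requests.get(f"{base}/_haproxy_getvhostmap").text
    for entry in vhost_map.splitlines():
        if not entry.strip():
            continue
        domain, backend = entry.split(None, 1)
        print(f"{domain} -> {backend}")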
--------------------------------------------------------------------------------
/testsAT/src/test/resources/schemas/nginx-qa-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "volumes": null,
3 | "id": "/nginx-qa",
4 | "cmd": null,
5 | "args": null,
6 | "user": null,
7 | "env": null,
8 | "instances": 1,
9 | "cpus": 0.5,
10 | "mem": 256,
11 | "disk": 0,
12 | "gpus": 0,
13 | "executor": null,
14 | "constraints": null,
15 | "fetch": null,
16 | "storeUrls": null,
17 | "backoffSeconds": 1,
18 | "backoffFactor": 1.15,
19 | "maxLaunchDelaySeconds": 3600,
20 | "container": {
21 | "docker": {
22 | "image": "qa.stratio.com/nginx:1.10.3-alpine",
23 | "forcePullImage": false,
24 | "privileged": false,
25 | "portMappings": [
26 | {
27 | "containerPort": 80,
28 | "protocol": "tcp",
29 | "servicePort": 80
30 | }
31 | ],
32 | "network": "BRIDGE"
33 | }
34 | },
35 | "healthChecks": [
36 | {
37 | "protocol": "HTTP",
38 | "path": "/",
39 | "gracePeriodSeconds": 100,
40 | "intervalSeconds": 20,
41 | "timeoutSeconds": 20,
42 | "maxConsecutiveFailures": 3,
43 | "ignoreHttp1xx": false
44 | }
45 | ],
46 | "readinessChecks": null,
47 | "dependencies": null,
48 | "upgradeStrategy": {
49 | "minimumHealthCapacity": 1,
50 | "maximumOverCapacity": 1
51 | },
52 | "labels": {
53 | "HAPROXY_0_BACKEND_WEIGHT": "0",
54 | "DCOS_SERVICE_SCHEME": "http",
55 | "DCOS_SERVICE_NAME": "nginx-qa",
56 | "HAPROXY_0_GROUP": "external",
57 | "DCOS_SERVICE_PORT_INDEX": "0",
58 | "DCOS_PACKAGE_NAME": "nginx-qa",
59 | "HAPROXY_0_PATH": null,
60 | "HAPROXY_0_VHOST": "nginx-qa.labs.stratio.com"
61 | },
62 | "acceptedResourceRoles": [
63 | "*"
64 | ],
65 | "residency": null,
66 | "secrets": null,
67 | "taskKillGracePeriodSeconds": null,
68 | "portDefinitions": [
69 | {
70 | "port": 10101,
71 | "protocol": "tcp",
72 | "labels": {}
73 | }
74 | ],
75 | "requirePorts": false
76 | }
--------------------------------------------------------------------------------
/testsAT/src/test/resources/schemas/nginx-qa-config_original.json:
--------------------------------------------------------------------------------
1 | {
2 | "labels": {
3 | "HAPROXY_0_BACKEND_WEIGHT": "0",
4 | "DCOS_SERVICE_SCHEME": "http",
5 | "DCOS_SERVICE_NAME": "nginx-qa",
6 | "HAPROXY_0_GROUP": "external",
7 | "DCOS_SERVICE_PORT_INDEX": "0",
8 | "DCOS_PACKAGE_NAME": "nginx-qa",
9 | "HAPROXY_0_PATH": null,
10 | "HAPROXY_0_VHOST": "nginx-qa.labs.stratio.com"
11 | },
12 | "id": "/nginx-qa",
13 | "acceptedResourceRoles": [
14 | "*"
15 | ],
16 | "backoffFactor": 1.15,
17 | "backoffSeconds": 1,
18 | "container": {
19 | "portMappings": [
20 | {
21 | "containerPort": 80,
22 | "labels": {
23 | "VIP_0": "/nginx-qa-minuteman:1234"
24 | },
25 | "servicePort": 0
26 | }
27 | ],
28 | "network": "BRIDGE",
29 | "type": "DOCKER",
30 | "volumes": [],
31 | "docker": {
32 | "image": "qa.stratio.com/nginx:1.10.3-alpine",
33 | "forcePullImage": false,
34 | "privileged": false,
35 | "parameters": []
36 | }
37 | },
38 | "cpus": 1,
39 | "disk": 0,
40 | "healthChecks": [
41 | {
42 | "gracePeriodSeconds": 100,
43 | "ignoreHttp1xx": false,
44 | "intervalSeconds": 20,
45 | "maxConsecutiveFailures": 3,
46 | "portIndex": 0,
47 | "timeoutSeconds": 20,
48 | "delaySeconds": 15,
49 | "protocol": "HTTP",
50 | "path": "/",
51 | "ipProtocol": "IPv4"
52 | }
53 | ],
54 | "instances": 1,
55 | "maxLaunchDelaySeconds": 3600,
56 | "mem": 256,
57 | "gpus": 0,
58 | "networks": [
59 | {
60 | "name": "stratio",
61 | "mode": "container"
62 | }
63 | ],
64 | "requirePorts": false,
65 | "upgradeStrategy": {
66 | "maximumOverCapacity": 1,
67 | "minimumHealthCapacity": 1
68 | },
69 | "killSelection": "YOUNGEST_FIRST",
70 | "unreachableStrategy": {
71 | "inactiveAfterSeconds": 0,
72 | "expungeAfterSeconds": 0
73 | },
74 | "fetch": [],
75 | "constraints": []
76 | }
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/functionalAT/Nightly_IT.java:
--------------------------------------------------------------------------------
1 | package com.stratio.marathonlbsec.functionalAT;
2 |
3 | import com.stratio.qa.cucumber.testng.CucumberFeatureWrapper;
4 | import com.stratio.qa.cucumber.testng.PickleEventWrapper;
5 | import com.stratio.qa.utils.BaseGTest;
6 | import cucumber.api.CucumberOptions;
7 | import org.testng.annotations.Test;
8 |
9 | @CucumberOptions(features = {
10 | // "src/test/resources/features/functionalAT/010_Installation/001_installationCCT_IT.feature",
11 | "src/test/resources/features/functionalAT/010_Installation/002_checkDeployment_IT.feature",
12 | "src/test/resources/features/functionalAT/020_Certificates/01_MARATHONLB_1386_AppCertificate.feature",
13 | "src/test/resources/features/functionalAT/020_Certificates/02_MARATHONLB_1386_ClientCertificate.feature",
14 | "src/test/resources/features/functionalAT/020_Certificates/03_QATM_2113_Check_Invalid_AppCertificate.feature",
15 | "src/test/resources/features/functionalAT/020_Certificates/04_QATM_2113_Certificates_MarathonLB_Service.feature",
16 | "src/test/resources/features/functionalAT/030_Logs/01_MARATHONLB_1388_CentralizedLogs.feature",
17 | "src/test/resources/features/functionalAT/030_Logs/02_QATM_2113_Log_Haproxy_Wrapper_Debug.feature",
18 | "src/test/resources/features/functionalAT/030_Logs/03_QATM_2113_Vault_Renewal_Token.feature",
19 | "src/test/resources/features/functionalAT/050_check_haproxy_host_path/01_EOS_2920_check_multiple_deployments.feature",
20 | "src/test/resources/features/functionalAT/060_monitoring/01_EOS_3139_monitoring_IT.feature"
21 | }, plugin = "json:target/cucumber.json")
22 | public class Nightly_IT extends BaseGTest {
23 |
24 | public Nightly_IT() {
25 | }
26 |
27 | @Test(enabled = true, groups = {"nightly"}, dataProvider = "scenarios")
28 | public void nightly(PickleEventWrapper pickleWrapper, CucumberFeatureWrapper featureWrapper) throws Throwable {
29 | runScenario(pickleWrapper, featureWrapper);
30 | }
31 | }
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/030_Logs/01_MARATHONLB_1388_CentralizedLogs.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_PASSWORD,DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD)
3 | Feature:[MARATHONLB-1388] Centralized logs
4 |
5 | Background:[Setup] Get MarathonLB task id
6 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
7 | When I run 'dcos task | grep marathon.*lb.* | tail -1 | awk '{print $5}'' in the ssh connection and save the value in environment variable 'TaskID'
8 |
9 | Scenario:[01] Check marathon-lb logs format
10 | And I run 'expr `dcos task log --lines 10000 !{TaskID} 2>&1 | wc -l` \* 5 / 100' in the ssh connection and save the value in environment variable 'totalLinesThreshold'
11 | And I run 'export LANG=en_US.UTF-8 && dcos task log --lines 10000 !{TaskID} 2>&1 | grep -P "^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}((.|,)\d{3})?((\+|-)\d{2}:?\d{2}|Z) (FATAL|ERROR|WARN|WARNING|NOTICE|INFO|DEBUG|TRACE|AUDIT) (-|\w+) (-|0|1) ([0-9A-Za-z\/\.\-_\$:]+) ([0-9A-Za-z\/\.\-_\$:-\[\]]+)(:\d+)? .*$" | wc -l' in the ssh connection and save the value in environment variable 'formatedLines'
12 | Then '!{formatedLines}' is higher than '!{totalLinesThreshold}'
13 |
14 | Scenario:[02] Check stdout/stderr is logged correctly
15 | And I run 'export LANG=en_US.UTF-8 && dcos task log --lines 10000 !{TaskID} stdout 2>&1 | grep -e "\(INFO\|WARN\|DEBUG\|TRACE\)" | wc -l' in the ssh connection and save the value in environment variable 'StdoutFormatedLines'
16 | And '!{StdoutFormatedLines}' is higher than '0'
17 | And I run 'export LANG=en_US.UTF-8 && dcos task log --lines 10000 !{TaskID} stderr 2>&1 | grep -v "\(INFO\|WARN\|DEBUG\|TRACE\)" | wc -l' in the ssh connection and save the value in environment variable 'StderrFormatedLines'
18 | Then '!{StderrFormatedLines}' is higher than '0'
19 |
20 | Scenario:[03] Check the Vault token is not leaked in the marathon-lb logs
21 | When I run 'export LANG=en_US.UTF-8 && dcos task log --lines 10000 !{TaskID} | grep !{VAULT_TOKEN}' in the ssh connection with exit status '1'
22 |
--------------------------------------------------------------------------------
/testsAT/src/test/java/com/stratio/marathonlbsec/specs/WhenSpec.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2015 Stratio (http://stratio.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.stratio.marathonlbsec.specs;
17 |
18 | import cucumber.api.java.en.When;
19 |
20 | import java.io.BufferedReader;
21 | import java.io.FileReader;
22 | import java.io.PrintStream;
23 | import java.net.InetSocketAddress;
24 | import java.nio.channels.ServerSocketChannel;
25 |
26 | import static com.stratio.qa.assertions.Assertions.assertThat;
27 |
28 | public class WhenSpec extends BaseSpec {
29 |
30 | public WhenSpec(Common spec) {
31 | this.commonspec = spec;
32 | }
33 |
34 | @When("^I start a socket in '([^:]+?):(.+?)?'$")
35 | public void startSocket(String socketHost, String socketPort) throws java.io.IOException, java.net.UnknownHostException {
36 | assertThat(socketHost).isNotEmpty();
37 | assertThat(socketPort).isNotEmpty();
38 | commonspec.getLogger().info("Creating socket at: " + socketHost + ":" + socketPort);
39 | commonspec.setServerSocket(ServerSocketChannel.open());
40 | commonspec.getServerSocket().socket().bind(new InetSocketAddress(socketHost, Integer.parseInt(socketPort)));
41 | }
42 |
43 |
44 | @When("^I send data from file '([^:]+?)' to socket$")
45 | public void sendDataToSocket(String baseData) throws java.io.IOException, java.io.FileNotFoundException {
46 | String line = "";
47 | PrintStream out = new PrintStream(commonspec.getServerSocket().socket().accept().getOutputStream());
48 | BufferedReader br = new BufferedReader(new FileReader(baseData));
49 |
50 | while ((line = br.readLine()) != null) {
51 | // forward each line of the file verbatim to the connected client
52 | out.println(line);
53 | }
54 | out.flush();
55 | }
56 |
57 | }
58 |
--------------------------------------------------------------------------------
/tests/marathon15_apps.json:
--------------------------------------------------------------------------------
1 | {
2 | "apps": [
3 | {
4 | "id": "/pywebserver",
5 | "backoffFactor": 1.15,
6 | "backoffSeconds": 1,
7 | "cmd": "echo \"host $HOST and port $PORT\" > index.html && python -m http.server 80",
8 | "container": {
9 | "type": "DOCKER",
10 | "docker": {
11 | "forcePullImage": false,
12 | "image": "python:3",
13 | "parameters": [],
14 | "privileged": false
15 | },
16 | "volumes": [],
17 | "portMappings": [
18 | {
19 | "containerPort": 80,
20 | "hostPort": 0,
21 | "labels": {},
22 | "name": "test",
23 | "protocol": "tcp",
24 | "servicePort": 10101
25 | }
26 | ]
27 | },
28 | "cpus": 0.1,
29 | "disk": 0,
30 | "executor": "",
31 | "instances": 1,
32 | "labels": {
33 | "HAPROXY_GROUP": "external",
34 | "HAPROXY_0_VHOST": "myvhost.com"
35 | },
36 | "maxLaunchDelaySeconds": 3600,
37 | "mem": 128,
38 | "gpus": 0,
39 | "networks": [
40 | {
41 | "mode": "container/bridge"
42 | }
43 | ],
44 | "requirePorts": false,
45 | "upgradeStrategy": {
46 | "maximumOverCapacity": 1,
47 | "minimumHealthCapacity": 1
48 | },
49 | "version": "2017-07-19T17:34:41.967Z",
50 | "versionInfo": {
51 | "lastScalingAt": "2017-07-19T17:34:41.967Z",
52 | "lastConfigChangeAt": "2017-07-19T17:34:41.967Z"
53 | },
54 | "killSelection": "YOUNGEST_FIRST",
55 | "unreachableStrategy": {
56 | "inactiveAfterSeconds": 300,
57 | "expungeAfterSeconds": 600
58 | },
59 | "tasksStaged": 0,
60 | "tasksRunning": 1,
61 | "tasksHealthy": 0,
62 | "tasksUnhealthy": 0,
63 | "deployments": [],
64 | "tasks": [
65 | {
66 | "ipAddresses": [
67 | {
68 | "ipAddress": "172.17.0.2",
69 | "protocol": "IPv4"
70 | }
71 | ],
72 | "stagedAt": "2017-07-19T17:34:43.039Z",
73 | "state": "TASK_RUNNING",
74 | "ports": [
75 | 1565
76 | ],
77 | "startedAt": "2017-07-19T17:35:15.654Z",
78 | "version": "2017-07-19T17:34:41.967Z",
79 | "id": "pywebserver.8cad6a69-6ca8-11e7-beb2-0e2beceebfcc",
80 | "appId": "/pywebserver",
81 | "slaveId": "db7b40e2-791c-445f-b373-183e2a648a86-S1",
82 | "host": "10.0.2.148"
83 | }
84 | ]
85 | }
86 | ]
87 | }
88 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/030_Logs/03_QATM_2113_Vault_Renewal_Token.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_PASSWORD,DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD)
3 | Feature: [QATM-2113] Vault Renewal Token
4 |
5 | Scenario:[01] Check marathon-lb logs
6 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
7 | And I run 'dcos task | grep marathon.*lb.* | tail -1 | awk '{print $5}'' in the ssh connection and save the value in environment variable 'TaskID'
8 | When in less than '300' seconds, checking each '10' seconds, the command output 'dcos task log --lines 10 !{TaskID} 2>/dev/null' contains 'INFO - 0 python marathon-lb.token_renewal.py {"@message": "Checking if token needs renewal"}'
9 |
10 | Scenario:[02] Change token period and check renewal
11 | Given I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
12 | When I run 'sudo docker exec -it paas-bootstrap curl -k -H "X-Vault-Token:!{VAULT_TOKEN}" https://vault.service.!{EOS_INTERNAL_DOMAIN}:${EOS_VAULT_PORT:-8200}/v1/auth/approle/role/open -k -XPOST -d '{"period": 60}' | jq .' in the ssh connection
13 | And I wait '5' seconds
14 | Then in less than '300' seconds, checking each '10' seconds, the command output 'sudo docker exec -it paas-bootstrap curl -k -s -XGET -H 'X-Vault-Token:!{VAULT_TOKEN}' https://vault.service.!{EOS_INTERNAL_DOMAIN}:${EOS_VAULT_PORT:-8200}/v1/auth/approle/role/open | jq -rMc .data.period' contains '60'
15 | When I run 'sudo docker exec -it paas-bootstrap curl -k -H "X-Vault-Token:!{VAULT_TOKEN}" https://vault.service.!{EOS_INTERNAL_DOMAIN}:${EOS_VAULT_PORT:-8200}/v1/auth/approle/role/open -k -XPOST -d '{"period": 600}' | jq .' in the ssh connection
16 | And I wait '5' seconds
17 | Then in less than '300' seconds, checking each '10' seconds, the command output 'sudo docker exec -it paas-bootstrap curl -k -s -XGET -H 'X-Vault-Token:!{VAULT_TOKEN}' https://vault.service.!{EOS_INTERNAL_DOMAIN}:${EOS_VAULT_PORT:-8200}/v1/auth/approle/role/open | jq -rMc .data.period' contains '600'
18 |
19 | Scenario:[03] Check marathon-lb logs after changing the token period
20 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
21 | And I run 'dcos task | grep marathon.*lb.* | tail -1 | awk '{print $5}'' in the ssh connection and save the value in environment variable 'TaskID'
22 | When in less than '300' seconds, checking each '10' seconds, the command output 'dcos task log --lines 10 !{TaskID} 2>/dev/null' contains 'INFO - 0 python marathon-lb.token_renewal.py {"@message": "Checking if token needs renewal"}'
23 |
--------------------------------------------------------------------------------
/zdd_exceptions.py:
--------------------------------------------------------------------------------
1 | """ Exit status 1 is already used by the script:
2 | zdd returns exit status 1 when the app is not force
3 | deleted, either through an argument or through the prompt.
4 | Exit status 2 is used for unknown exceptions.
5 | """
6 |
7 |
8 | class InvalidArgException(Exception):
9 | """ This exception indicates an invalid combination of arguments
10 | passed to zdd"""
11 | def __init__(self, msg):
12 | super(InvalidArgException, self).__init__(msg)
13 | self.error = msg
14 | self.zdd_exit_status = 3
15 |
16 |
17 | class MissingFieldException(Exception):
18 | """ This exception indicates required fields that are missing
19 | in the JSON payload passed to zdd"""
20 | def __init__(self, msg, field):
21 | super(MissingFieldException, self).__init__(msg)
22 | self.error = msg
23 | self.missing_field = field
24 | self.zdd_exit_status = 4
25 |
26 |
27 | class MarathonLbEndpointException(Exception):
28 | """ This exception indicates an issue with one of the marathon-lb
29 | endpoints specified as an argument to zdd"""
30 | def __init__(self, msg, url, error):
31 | super(MarathonLbEndpointException, self).__init__(msg)
32 | self.msg = msg
33 | self.url = url
34 | self.error = error
35 | self.zdd_exit_status = 5
36 |
37 |
38 | class MarathonEndpointException(Exception):
39 | """ This exception indicates an issue with the marathon endpoint
40 | specified as an argument to zdd"""
41 | def __init__(self, msg, url, error):
42 | super(MarathonEndpointException, self).__init__(msg)
43 | self.msg = msg
44 | self.url = url
45 | self.error = error
46 | self.zdd_exit_status = 6
47 |
48 |
49 | class AppCreateException(Exception):
50 | """ This exception indicates there was an error while creating the
51 | new App and hence it was not created."""
52 | def __init__(self, msg, url, payload, error):
53 | super(AppCreateException, self).__init__(msg)
54 | self.msg = msg
55 | self.error = error
56 | self.url = url
57 | self.payload = payload
58 | self.zdd_exit_status = 7
59 |
60 |
61 | class AppDeleteException(Exception):
62 | """ This exception indicates there was an error while deleting the
63 | old App and hence it was not deleted """
64 | def __init__(self, msg, url, appid, error):
65 | super(AppDeleteException, self).__init__(msg)
66 | self.msg = msg
67 | self.error = error
68 | self.url = url
69 | self.zdd_exit_status = 8
70 |
71 |
72 | class AppScaleException(Exception):
73 | """ This exception indicates there was an error while either scaling up
74 | the new app or scaling down the old app"""
75 | def __init__(self, msg, url, payload, error):
76 | super(AppScaleException, self).__init__(msg)
77 | self.msg = msg
78 | self.error = error
79 | self.url = url
80 | self.payload = payload
81 | self.zdd_exit_status = 9
82 |
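
Per the module docstring, each exception class carries its own zdd_exit_status (3 to 9), with 1 and 2 reserved. A minimal sketch of how a caller could map these to process exit codes (invoke_zdd here is a hypothetical stand-in for the real deployment logic):

    import sys

    from zdd_exceptions import InvalidArgException, MarathonEndpointException


    def invoke_zdd():
        # Hypothetical stand-in for the actual zdd deployment logic.
        raise InvalidArgException("example: incompatible arguments passed to zdd")


    def main():
        try:
            invoke_zdd()
        except (InvalidArgException, MarathonEndpointException) as exc:
            print(exc.error, file=sys.stderr)
            sys.exit(exc.zdd_exit_status)  # 3..9, depending on the exception class
        except Exception as exc:
            print(exc, file=sys.stderr)
            sys.exit(2)                    # unknown exceptions, per the docstring


    if __name__ == "__main__":
        main()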
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## 0.8.0 (upcoming)
4 |
5 | * [EOS-3882] Include dcos-spartan/net resolvers in default template
6 |
7 | ## 0.7.0-62c453c (Built: June 01, 2020 | Released: June 19, 2020)
8 |
9 | * Change Anchore policy
10 | * [EOS-3590] Fix Vault token renewal
11 |
12 | ## 0.6.0-458dce6 (Built: January 14, 2020 | Released: January 27, 2020)
13 |
14 | * [EOS-3126] Fix PyJWT vulnerability
15 | * [EOS-3126] Fix and activate Anchore tests
16 |
17 | ## 0.5.0-96093b8 (Built: October 03, 2019 | Released: November 18, 2019)
18 |
19 | * [EOS-3031] Bump kms_utils to version 0.4.6
20 | * [EOS-2970] Treat HAPROXY_RSYSLOG as a boolean parameter
21 | * [EOS-2939] Update error message for Marathon-LB default certificate
22 | * Improve exception handling and logging
23 | * [EOS-2913] Replace healthchecking lua mechanism
24 | * [EOS-2879] Bump HAproxy to 2.0.5 and enable metrics endpoint
25 |
26 | ## 0.4.0-aa77ef6 (Built: July 02, 2019 | Released: July 04, 2019)
27 |
28 | * [EOS-2578] Fold all certificates, keys and CAs to files with lines of length 64
29 | * [EOS-2579] Download certs only when an app's backends increase from 0 and the cert is not already present
30 | * [EOS-2425] Error when logging app id of not found Vault cert
31 | * [EOS-2395] New label in marathon-lb to specify certs location
32 | * Adapt repo to new versioning flow
33 | * [EOS-1819] Download certificates only of new deployed apps
34 | * [EOS-1817] Look for marathon-lb's own cert following the multitenant convention, falling back to the "default" path if not found
35 | * [EOS-1825] Fix logger in haproxy_wrapper.py
36 | * [EOS-1816] Add new thread to renew vault token and fix token expire_time calculation
37 | * [EOS-1810] Include checking vault token state in healthcheck
38 | * Fix curl dependency and gpg keyserver in Dockerfile
39 | * [EOS-1074] Fix to tcplog format in tcp backends
40 | * Fix isolate failed backends when regenerating config
41 |
42 | ## 0.3.1 (March 06, 2018)
43 |
44 | * [EOS-1074] Fix to tcplog format in tcp backends
45 | * Fix isolate failed backends when regenerating config
46 |
47 | ## 0.3.0 (February 20, 2018)
48 |
49 | * [EOS-987] Marathon-lb-sec logging format should comply with standard
50 | * [EOS-987] Included b-log version 0.4.0
51 | * [EOS-987] Python, bash, and HAproxy with standard centralized log format
52 | * [EOS-1023] Bug fixing with dead connections to Vault
53 | * [EOS-1038] Output marathon-lb-sec logs to stdout
54 | * [EOS-1067] Ensure the default marathon-lb certificate is served via SNI if there is no certificate for the specific app
55 | * [EOS-1068] Updated kms_utils version to 0.4.0
56 | * [EOS-1069] Add CA-bundle to the container
57 | * Add iptables rules in position 2 if a calico rule is present
58 | * Updated Marathon-LB main version to v1.11.3
59 | * Bug fixing with race conditions
60 |
61 | ## 0.2.0 (December 19, 2017)
62 |
63 | * [EOS-852] Expose certificates per app
64 | * Python kms_utils wrapper
65 | * Updated kms_utils version to 0.3.0
66 |
67 | ## 0.1.0 (November 22, 2017)
68 |
69 | * [EOS-568] Implement dynamic authentication in Marathon-lb entrypoint
70 | * Marathon-LB main version v1.10.3
71 |
--------------------------------------------------------------------------------
/tests/run-benchmark.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | # Note: before you run this test, make sure you set:
4 | # sysctl -w net.ipv4.tcp_max_syn_backlog=20000
5 | # To make sure that all the TCP connections are retried correctly.
6 |
7 | require 'cassandra'
8 | require 'securerandom'
9 |
10 | output = `ab -r -s 60 -c 1000 -n 50000 http://marathon-lb.marathon.mesos:10000/`
11 | exit_code = $?.exitstatus
12 | puts output
13 |
14 | CASSANDRA_HOSTS = ['cassandra-dcos-node.cassandra.dcos.mesos']
15 | CASSANDRA_PORT = 9042
16 |
17 | cluster = Cassandra.cluster(hosts: CASSANDRA_HOSTS, port: CASSANDRA_PORT)
18 |
19 | session = cluster.connect
20 |
21 | session.execute("CREATE KEYSPACE IF NOT EXISTS " +
22 | "benchmark WITH REPLICATION = " +
23 | "{ 'class' : 'SimpleStrategy', 'replication_factor' : 1 }")
24 | session.execute("USE benchmark")
25 |
26 | session.execute("CREATE TABLE IF NOT EXISTS results (" +
27 | "id uuid PRIMARY KEY," +
28 | "ts timestamp," +
29 | "req_completed int," +
30 | "req_failed int," +
31 | "non_2xxresponses int," +
32 | "failed_on_connect int," +
33 | "failed_on_receive int," +
34 | "failed_on_length int," +
35 | "failed_on_exception int," +
36 | "ab_exit_code int)"
37 | )
38 |
39 | lines = output.split(/\r?\n/)
40 |
41 | result = {
42 | :req_completed => 0,
43 | :req_failed => 0,
44 | :non_2xxresponses => 0,
45 | :failed_on_connect => 0,
46 | :failed_on_receive => 0,
47 | :failed_on_length => 0,
48 | :failed_on_exception => 0,
49 | :ab_exit_code => 0,
50 | }
51 |
52 | result[:ab_exit_code] = exit_code
53 |
54 | lines.each do |line|
55 | /Complete requests:\s+(\d+)/.match(line) do |m|
56 | result[:req_completed] = m[1].to_i
57 | end
58 | /Failed requests:\s+(\d+)/.match(line) do |m|
59 | result[:req_failed] = m[1].to_i
60 | end
61 | /Connect: (\d+), Receive: (\d+), Length: (\d+), Exceptions: (\d+)/.match(line) do |m|
62 | result[:failed_on_connect] = m[1].to_i
63 | result[:failed_on_receive] = m[2].to_i
64 | result[:failed_on_length] = m[3].to_i
65 | result[:failed_on_exception] = m[4].to_i
66 | end
67 | /Non-2xx responses:\s+(\d+)/.match(line) do |m|
68 | result[:non_2xxresponses] = m[1].to_i
69 | end
70 | end
71 |
72 | statement = session.prepare('INSERT INTO results ' +
73 | '(id, ts, req_completed, req_failed, non_2xxresponses,' +
74 | ' failed_on_connect, failed_on_receive,' +
75 | ' failed_on_length, failed_on_exception, ab_exit_code)' +
76 | ' VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)')
77 |
78 | session.execute(statement, arguments: [
79 | Cassandra::Uuid::Generator.new.uuid,
80 | Time.now,
81 | result[:req_completed],
82 | result[:req_failed],
83 | result[:non_2xxresponses],
84 | result[:failed_on_connect],
85 | result[:failed_on_receive],
86 | result[:failed_on_length],
87 | result[:failed_on_exception],
88 | result[:ab_exit_code],
89 | ])
90 |
91 | session.close
92 |
93 | exit exit_code
94 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/020_Certificates/QATM_1685_Invalid_Password.feature:
--------------------------------------------------------------------------------
1 | @rest
2 | Feature: Marathon-lb is not able to run without valid Marathon passwords (mesos and rest) in Vault
3 |
4 | Scenario:[01] Delete valid marathon password in vcli
5 | Given I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
6 | Then I outbound copy 'src/test/resources/scripts/marathon-lb-invalid-password.sh' through a ssh connection to '/tmp'
7 | And I run 'cd /tmp && sudo chmod +x marathon-lb-invalid-password.sh' in the ssh connection
8 | And I run 'sudo mv /tmp/marathon-lb-invalid-password.sh /stratio_volume/marathon-lb-invalid-password.sh' in the ssh connection
9 | And I run 'sudo docker ps | grep eos-installer | awk '{print $1}'' in the ssh connection and save the value in environment variable 'containerId'
10 | And I run 'sudo docker exec -t !{containerId} /stratio_volume/marathon-lb-invalid-password.sh' in the ssh connection
11 |
12 | @include(feature:../010_Installation/010_installation.feature,scenario:[Install Marathon-lb][01])
13 | Scenario:[02] Prepare configuration to install Marathon-lb
14 | Then I wait '5' seconds
15 |
16 | Scenario:[03] Install using config file and cli
17 | #Copy DEPLOY JSON to DCOS-CLI
18 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
19 | And I run 'dcos marathon app show marathonlb | jq -r .container.docker.image | sed 's/.*://g'' in the ssh connection and save the value in environment variable 'marathonlbversion'
20 | When I outbound copy 'target/test-classes/config.${MARATHON_LB_VERSION:-0.3.1}.json' through a ssh connection to '/tmp/'
21 | And I run 'dcos package install --yes --package-version=!{marathonlbversion} --options=/tmp/config.!{marathonlbversion}.json ${PACKAGE_MARATHON_LB:-marathon-lb-sec}' in the ssh connection
22 | Then the command output contains 'Marathon-lb DC/OS Service has been successfully installed!'
23 | And I run 'rm -f /tmp/config.!{marathonlbversion}.json' in the ssh connection
24 | And I wait '45' seconds
25 | # Marathon-lb-sec is not installed because passwords for marathon are incorrect
26 | And in less than '300' seconds, checking each '20' seconds, the command output 'dcos task | grep -w marathonlb. | wc -l' contains '0'
27 |
28 | Scenario: Restore Password for Marathon - mesos and rest
29 | Given I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
30 | Then I outbound copy 'src/test/resources/scripts/marathon-lb-restore-password.sh' through a ssh connection to '/tmp'
31 | And I run 'cd /tmp && sudo chmod +x marathon-lb-restore-password.sh' in the ssh connection
32 | And I run 'sudo mv /tmp/marathon-lb-restore-password.sh /stratio_volume/marathon-lb-restore-password.sh' in the ssh connection
33 | And I run 'sudo docker ps | grep eos-installer | awk '{print $1}'' in the ssh connection and save the value in environment variable 'containerId'
34 | And I run 'sudo docker exec -t !{containerId} /stratio_volume/marathon-lb-restore-password.sh' in the ssh connection
35 |
36 | #Uninstalling marathon-lb-sec
37 | @include(feature:../099_Uninstall/purge.feature,scenario:marathon-lb-sec can be uninstalled using cli)
38 | Scenario: Uninstall marathon-lb-sec
39 | Then I wait '5' seconds
40 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/020_Certificates/QATM_1685_Invalid_Certificates_IT.feature:
--------------------------------------------------------------------------------
1 | @rest
2 | Feature: Marathon-lb is not able to run without valid certificates in Vault
3 |
4 | Scenario: [01] Delete valid marathon-lb certificate in vcli
5 | Given I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
6 | Then I outbound copy 'src/test/resources/scripts/marathon-lb-invalid-certs.sh' through a ssh connection to '/tmp'
7 | And I run 'cp /stratio_volume/certs.list certs_custom_app_marathonlb.list' in the ssh connection
8 | And I run 'cd /tmp && sudo chmod +x marathon-lb-invalid-certs.sh' in the ssh connection
9 | And I run 'sudo mv /tmp/marathon-lb-invalid-certs.sh /stratio_volume/marathon-lb-invalid-certs.sh' in the ssh connection
10 | And I run 'sudo docker ps | grep eos-installer | awk '{print $1}'' in the ssh connection and save the value in environment variable 'containerId'
11 | And I run 'sudo docker exec -t !{containerId} /stratio_volume/marathon-lb-invalid-certs.sh' in the ssh connection
12 |
13 | @include(feature:../010_Installation/010_installation.feature,scenario:[02] Create installation config file)
14 | Scenario: Prepare configuration to install Marathon-lb
15 | Then I wait '5' seconds
16 |
17 | Scenario: [03] Install using config file and cli
18 | #Copy DEPLOY JSON to DCOS-CLI
19 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
20 | And I run 'dcos marathon app show marathonlb | jq -r .container.docker.image | sed 's/.*://g'' in the ssh connection and save the value in environment variable 'marathonlbversion'
21 | When I outbound copy 'target/test-classes/config.!{marathonlbversion}.json' through a ssh connection to '/tmp/'
22 | And I run 'dcos package install --yes --package-version=!{marathonlbversion} --options=/tmp/config.!{marathonlbversion}.json ${PACKAGE_MARATHON_LB:-marathon-lb-sec}' in the ssh connection
23 | Then the command output contains 'Marathon-lb DC/OS Service has been successfully installed!'
24 | And I run 'rm -f /tmp/config.!{marathonlbversion}.json' in the ssh connection
25 | And I wait '45' seconds
26 | # Marathon-lb-sec is not installed because certificates are incorrect
27 | And in less than '300' seconds, checking each '20' seconds, the command output 'dcos task | grep -w marathonlb. | wc -l' contains '0'
28 |
29 | Scenario: Restore Certificates for Marathon-lb-sec
30 | Given I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
31 | Then I outbound copy 'src/test/resources/scripts/marathon-lb-restore-certs.sh' through a ssh connection to '/tmp'
32 | And I run 'cp /stratio_volume/certs.list certs_custom_app_marathonlb.list' in the ssh connection
33 | And I run 'cd /tmp && sudo chmod +x marathon-lb-restore-certs.sh' in the ssh connection
34 | And I run 'sudo mv /tmp/marathon-lb-restore-certs.sh /stratio_volume/marathon-lb-restore-certs.sh' in the ssh connection
35 | And I run 'sudo docker ps | grep eos-installer | awk '{print $1}'' in the ssh connection and save the value in environment variable 'containerId'
36 | And I run 'sudo docker exec -t !{containerId} /stratio_volume/marathon-lb-restore-certs.sh' in the ssh connection
37 |
38 | #Uninstalling marathon-lb-sec
39 | @include(feature:../099_Uninstall/purge.feature,scenario:[01] marathon-lb-sec can be uninstalled using cli)
40 | Scenario: Uninstall marathon-lb-sec
41 | Then I wait '5' seconds
42 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/020_Certificates/02_MARATHONLB_1386_ClientCertificate.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,EOS_INSTALLER_VERSION,DCOS_PASSWORD,DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD)
3 | Feature: Deploying marathon-lb-sec with client certificate
4 |
5 | Scenario:[01] Deploying marathon-lb-sec with a client certificate
6 | Given I run 'sudo cp /etc/hosts /tmp/hostbackup' locally
7 | And I run 'cat /etc/hosts | grep nginx-qa.!{EOS_DNS_SEARCH} || echo "!{PUBLIC_NODE} nginx-qa.!{EOS_DNS_SEARCH}" | sudo tee -a /etc/hosts' locally
8 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
9 | And I outbound copy 'src/test/resources/scripts/marathon-lb-manage-certs.sh' through a ssh connection to '/tmp'
10 | And I run 'sed -i s/"DNS:nginx-qa.labs.stratio.com"/"DNS:nginx-qa.!{EOS_DNS_SEARCH}"/g /tmp/marathon-lb-manage-certs.sh' in the ssh connection
11 | And I run 'cd /tmp && sudo chmod +x marathon-lb-manage-certs.sh' in the ssh connection
12 | And I run 'sudo mv /tmp/marathon-lb-manage-certs.sh /stratio_volume/marathon-lb-manage-certs.sh' in the ssh connection
13 | And I run 'sudo docker exec -t paas-bootstrap /stratio_volume/marathon-lb-manage-certs.sh' in the ssh connection
14 | And I wait '60' seconds
15 | And I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
16 | And I outbound copy 'src/test/resources/schemas/nginx-qa-config.json' through a ssh connection to '/tmp'
17 | And I run 'sed -i -e 's/qa.stratio.com/!{EXTERNAL_DOCKER_REGISTRY}/g' -e '/"HAPROXY_0_PATH": null,/d' -e 's/"HAPROXY_0_VHOST": "nginx-qa.labs.stratio.com"/"HAPROXY_0_VHOST": "nginx-qa.!{EOS_DNS_SEARCH}"/g' /tmp/nginx-qa-config.json ; dcos marathon app add /tmp/nginx-qa-config.json ; rm -f /tmp/nginx-qa-config.json' in the ssh connection
18 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos task | grep nginx-qa | grep R | wc -l' contains '1'
19 | When I run 'dcos task | grep marathonlb | tail -1 | awk '{print $5}'' in the ssh connection and save the value in environment variable 'TaskID'
20 | And I run 'dcos marathon task list nginx-qa | awk '{print $5}' | grep nginx-qa' in the ssh connection and save the value in environment variable 'nginx-qaTaskId'
21 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep TASK_RUNNING | wc -l' contains '1'
22 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep healthCheckResults | wc -l' contains '1'
23 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep '"alive": true' | wc -l' contains '1'
24 | Given I run 'dcos marathon app remove --force nginx-qa' in the ssh connection
25 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos task | awk '{print $1}' | grep -c nginx-qa' contains '0'
26 | And I run 'sudo cp /tmp/hostbackup /etc/hosts' locally
27 |
28 | Scenario:[02] Deleting files
29 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
30 | And I run 'sudo rm -rf /stratio_volume/certs_client_marathonlb.list ; sudo rm -rf /stratio_volume/marathon-lb-cert-backup.json ; sudo rm -rf /stratio_volume/marathon-lb-manage-certs.sh' in the ssh connection
31 |
32 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/060_monitoring/01_EOS_3139_monitoring_IT.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_PASSWORD,EOS_MONITOR_CLUSTER,UNIVERSE_VERSION,EOS_PUBLIC_AGENTS_LIST,DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD)
3 | Feature: Check Marathon-LB monitoring integration
4 |
5 | @runOnEnv(EOS_MONITOR_CLUSTER=yes)
6 | @runOnEnv(UNIVERSE_VERSION>0.6.0||UNIVERSE_VERSION=0.6.0)
7 | Scenario:[01] Check monitoring solution is installed
8 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
9 | And I run 'dcos task | grep grafana-eos.* | grep R' in the ssh connection
10 | And I run 'dcos task | grep prometheus-eos.* | grep R' in the ssh connection
11 | And I run 'dcos task | grep marathon-exporter.* | grep R' in the ssh connection
12 | And I run 'dcos marathon task list | grep .*/grafana-eos | awk '{print $5}'' in the ssh connection and save the value in environment variable 'grafanaTaskId'
13 | And I run 'dcos marathon task list | grep .*/prometheus-eos | awk '{print $5}'' in the ssh connection and save the value in environment variable 'prometheusTaskId'
14 | And I run 'dcos marathon task list | grep .*/marathon-exporter | awk '{print $5}'' in the ssh connection and save the value in environment variable 'marathonExporterTaskId'
15 | And I run 'dcos marathon app show grafana-eos | jq -r .networks[].name | grep -w metrics' in the ssh connection
16 | And I run 'dcos marathon app show prometheus-eos | jq -r .networks[].name | grep -w metrics' in the ssh connection
17 | And I run 'dcos marathon app show marathon-exporter | jq -r .networks[].name | grep -w metrics' in the ssh connection
18 | And I run 'dcos marathon task show !{grafanaTaskId} | grep -A50 healthCheckResults | grep -A50 '"alive": true' | grep '"state": "TASK_RUNNING"'' in the ssh connection
19 | And I run 'dcos marathon task show !{prometheusTaskId} | grep -A50 healthCheckResults | grep -A50 '"alive": true' | grep '"state": "TASK_RUNNING"'' in the ssh connection
20 | And I run 'dcos marathon task show !{prometheusTaskId} | jq -r .ipAddresses[].ipAddress' in the ssh connection and save the value in environment variable 'prometheusCalicoIP'
21 | And I run 'dcos marathon task show !{marathonExporterTaskId} | grep -A50 healthCheckResults | grep -A50 '"alive": true' | grep '"state": "TASK_RUNNING"'' in the ssh connection
22 | And I run 'dcos marathon task show !{marathonExporterTaskId} | jq -r .ipAddresses[].ipAddress' in the ssh connection and save the value in environment variable 'marathonExporterCalicoIP'
23 |
24 | @runOnEnv(UNIVERSE_VERSION>0.6.0||UNIVERSE_VERSION=0.6.0)
25 | @runOnEnv(EOS_MONITOR_CLUSTER=yes)
26 | @loop(EOS_PUBLIC_AGENTS_LIST,PUBLIC_IP)
27 | Scenario:[02] Check exporter in Prometheus-EOS
28 | Given I securely send requests to '!{EOS_ACCESS_POINT}'
29 | And I set sso token using host '!{EOS_ACCESS_POINT}' with user '!{DCOS_USER}' and password '${DCOS_PASSWORD}' and tenant '!{DCOS_TENANT}'
30 | When I send a 'GET' request to '/service/prometheus-eos/api/v1/targets'
31 | Then the service response status must be '200'
32 | And I save element '$' in environment variable 'exporters'
33 | And I run 'echo '!{exporters}' | jq '.data.activeTargets[] | select(.discoveredLabels.job=="services" and .health=="up" and .scrapeUrl=="http://'':9090/metrics" and .discoveredLabels.name=="marathonlb" and .discoveredLabels.task_id=="marathonlb" and .labels.name=="marathonlb" and .labels.task_id=="marathonlb")'' locally
34 | And I run 'curl -XGET '':9090/metrics' locally
35 | And the service response status must be '200'
36 |
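
For readers less familiar with jq, the selection performed in scenario [02] above can be sketched in Python (illustrative only; the scrape-URL check against the public agent IP is omitted here, and the payload shape is the standard Prometheus `/api/v1/targets` response):

```python
# Roughly the same selection as the jq filter in scenario [02]: keep only
# marathon-lb targets that Prometheus reports as a healthy 'services' job.
def marathonlb_targets(targets_payload):
    return [t for t in targets_payload['data']['activeTargets']
            if t['discoveredLabels'].get('job') == 'services'
            and t['health'] == 'up'
            and t['labels'].get('name') == 'marathonlb'
            and t['labels'].get('task_id') == 'marathonlb']
```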
--------------------------------------------------------------------------------
/tests/test_marathon_lb_haproxy_options.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from tests.test_marathon_lb import TestMarathonUpdateHaproxy
4 |
5 |
6 | def template_option(opt):
7 | return ' option {opt}\n'.format(opt=opt)
8 |
9 |
10 | base_config_prefix = '''global
11 | daemon
12 | log /dev/log local0
13 | log /dev/log local1 notice
14 | spread-checks 5
15 | max-spread-checks 15000
16 | maxconn 50000
17 | tune.ssl.default-dh-param 2048
18 | ssl-default-bind-ciphers ECDHE-ECDSA-AES128-GCM-SHA256:\
19 | ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:\
20 | ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:\
21 | DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:\
22 | ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES128-SHA256:\
23 | DHE-RSA-AES256-SHA256:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:\
24 | AES256-SHA256:!aNULL:!MD5:!DSS
25 | ssl-default-bind-options no-sslv3 no-tlsv10 no-tls-tickets
26 | ssl-default-server-ciphers ECDHE-ECDSA-AES128-GCM-SHA256:\
27 | ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:\
28 | ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:\
29 | DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:\
30 | ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES128-SHA256:\
31 | DHE-RSA-AES256-SHA256:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:\
32 | AES256-SHA256:!aNULL:!MD5:!DSS
33 | ssl-default-server-options no-sslv3 no-tlsv10 no-tls-tickets
34 | stats socket /var/run/haproxy/socket
35 | server-state-file global
36 | server-state-base /var/state/haproxy/
37 | lua-load /marathon-lb/getpids.lua
38 | lua-load /marathon-lb/getconfig.lua
39 | lua-load /marathon-lb/getmaps.lua
40 | lua-load /marathon-lb/signalmlb.lua
41 | defaults
42 | load-server-state-from-file global
43 | log global
44 | retries 3
45 | backlog 10000
46 | maxconn 10000
47 | timeout connect 3s
48 | timeout client 30s
49 | timeout server 30s
50 | timeout tunnel 3600s
51 | timeout http-keep-alive 1s
52 | timeout http-request 15s
53 | timeout queue 30s
54 | timeout tarpit 60s
55 | '''
56 |
57 | base_config_suffix = '''\
58 | listen stats
59 | bind 0.0.0.0:9090
60 | balance
61 | mode http
62 | stats enable
63 | monitor-uri /_haproxy_health_check
64 | acl getpid path /_haproxy_getpids
65 | http-request use-service lua.getpids if getpid
66 | acl getvhostmap path /_haproxy_getvhostmap
67 | http-request use-service lua.getvhostmap if getvhostmap
68 | acl getappmap path /_haproxy_getappmap
69 | http-request use-service lua.getappmap if getappmap
70 | acl getconfig path /_haproxy_getconfig
71 | http-request use-service lua.getconfig if getconfig
72 |
73 | acl signalmlbhup path /_mlb_signal/hup
74 | http-request use-service lua.signalmlbhup if signalmlbhup
75 | acl signalmlbusr1 path /_mlb_signal/usr1
76 | http-request use-service lua.signalmlbusr1 if signalmlbusr1
77 | '''
78 |
79 |
80 | class TestAdditionalOptions(TestMarathonUpdateHaproxy):
81 |
82 | def setUp(self):
83 | self.maxDiff = None
84 | os.environ['HAPROXY_GLOBAL_DEFAULT_OPTIONS'] = 'httplog,tcplog'
85 | base_config = base_config_prefix
86 | base_config += template_option('httplog')
87 | base_config += template_option('tcplog')
88 | base_config += base_config_suffix
89 | self.base_config = base_config
90 |
91 |
92 | class TestDuplicatedOptions(TestMarathonUpdateHaproxy):
93 |
94 | def setUp(self):
95 | self.maxDiff = None
96 | os.environ['HAPROXY_GLOBAL_DEFAULT_OPTIONS'] = \
97 | 'httplog,tcplog,dontlognull,tcplog'
98 | base_config = base_config_prefix
99 | base_config += template_option('dontlognull')
100 | base_config += template_option('httplog')
101 | base_config += template_option('tcplog')
102 | base_config += base_config_suffix
103 | self.base_config = base_config
104 |
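
The two fixtures above pin down the expected handling of `HAPROXY_GLOBAL_DEFAULT_OPTIONS`: duplicate entries are collapsed and the remaining options are rendered as sorted `option` lines in the `defaults` section. A minimal sketch of that behaviour, using a hypothetical helper (this is not the actual marathon-lb template code):

```python
def render_default_options(raw):
    """Turn 'httplog,tcplog,dontlognull,tcplog' into deduplicated,
    alphabetically sorted 'option <name>' lines, as the tests above expect.
    Spacing follows template_option() defined at the top of this file."""
    opts = sorted({o.strip() for o in raw.split(',') if o.strip()})
    return ''.join(' option {opt}\n'.format(opt=opt) for opt in opts)

# 'httplog,tcplog,dontlognull,tcplog' -> dontlognull, httplog, tcplog
print(render_default_options('httplog,tcplog,dontlognull,tcplog'))
```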
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM debian:buster
2 |
3 | ARG VERSION
4 |
5 | # artifacts versions
6 | ARG SEC_UTILS_VERSION=0.4.6
7 |
8 | # nexus repository artifacts
9 | ADD http://sodio.stratio.com/repository/paas/kms_utils/${SEC_UTILS_VERSION}/kms_utils-${SEC_UTILS_VERSION}.sh /usr/sbin/kms_utils.sh
10 | ADD http://sodio.stratio.com/repository/paas/log_utils/${SEC_UTILS_VERSION}/b-log-${SEC_UTILS_VERSION}.sh /usr/sbin/b-log.sh
11 |
12 | # runtime dependencies
13 | RUN apt-get update && apt-get install -y --no-install-recommends \
14 | ca-certificates \
15 | inetutils-syslogd \
16 | libcurl4 \
17 | liblua5.3-0 \
18 | libssl1.1 \
19 | openssl \
20 | procps \
21 | python3 \
22 | runit \
23 | gnupg-agent \
24 | socat \
25 | curl \
26 | jq \
27 | && apt-get install -y --no-install-recommends rsyslog \
28 | && rm -rf /var/lib/apt/lists/*
29 |
30 | ENV TINI_VERSION=v0.16.1 \
31 | TINI_GPG_KEY=595E85A6B1B4779EA4DAAEC70B588DFF0527A9B7
32 | RUN set -x \
33 | && apt-get update && apt-get install -y --no-install-recommends dirmngr gpg wget \
34 | && rm -rf /var/lib/apt/lists/* \
35 | && wget -O tini "https://github.com/krallin/tini/releases/download/$TINI_VERSION/tini-amd64" \
36 | && wget -O tini.asc "https://github.com/krallin/tini/releases/download/$TINI_VERSION/tini-amd64.asc" \
37 | && export GNUPGHOME="$(mktemp -d)" \
38 | && gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$TINI_GPG_KEY" \
39 | || gpg --keyserver pool.sks-keyservers.net --recv-keys "$TINI_GPG_KEY" \
40 | || gpg --keyserver keyserver.pgp.com --recv-keys "$TINI_GPG_KEY" \
41 | || gpg --keyserver pgp.mit.edu --recv-keys "$TINI_GPG_KEY" \
42 | && gpg --batch --verify tini.asc tini \
43 | && rm -rf "$GNUPGHOME" tini.asc \
44 | && mv tini /usr/bin/tini \
45 | && chmod +x /usr/bin/tini \
46 | && chmod +x /usr/sbin/kms_utils.sh \
47 | && tini -- true \
48 | && apt-get purge -y --auto-remove dirmngr gpg wget
49 |
50 |
51 | ENV HAPROXY_MAJOR=2.0 \
52 | HAPROXY_VERSION=2.0.5 \
53 | HAPROXY_MD5=497c716adf4b056484601a887f34d152
54 |
55 | COPY requirements.txt /marathon-lb/
56 |
57 | COPY MARATHON-LB-VERSION /marathon-lb/
58 |
59 | RUN set -x \
60 | && buildDeps=' \
61 | build-essential \
62 | gcc \
63 | libcurl4-openssl-dev \
64 | libffi-dev \
65 | liblua5.3-dev \
66 | libpcre3-dev \
67 | libssl-dev \
68 | make \
69 | python3-dev \
70 | python3-pip \
71 | python3-setuptools \
72 | wget \
73 | zlib1g-dev \
74 | ' \
75 | && apt-get update \
76 | && apt-get install -y --no-install-recommends $buildDeps \
77 | && rm -rf /var/lib/apt/lists/* \
78 | \
79 | # Build HAProxy
80 | && wget -O haproxy.tar.gz "https://www.haproxy.org/download/$HAPROXY_MAJOR/src/haproxy-$HAPROXY_VERSION.tar.gz" \
81 | && echo "$HAPROXY_MD5 haproxy.tar.gz" | md5sum -c \
82 | && mkdir -p /usr/src/haproxy \
83 | && tar -xzf haproxy.tar.gz -C /usr/src/haproxy --strip-components=1 \
84 | && rm haproxy.tar.gz \
85 | && make -C /usr/src/haproxy \
86 | TARGET=linux-glibc \
87 | ARCH=x86_64 \
88 | USE_LUA=1 \
89 | LUA_INC=/usr/include/lua5.3/ \
90 | USE_OPENSSL=1 \
91 | USE_PCRE_JIT=1 \
92 | USE_PCRE=1 \
93 | USE_REGPARM=1 \
94 | USE_STATIC_PCRE=1 \
95 | USE_ZLIB=1 \
96 | EXTRA_OBJS="contrib/prometheus-exporter/service-prometheus.o" \
97 | all \
98 | install-bin \
99 | && rm -rf /usr/src/haproxy \
100 | \
101 | # Install Python dependencies
102 | # Install Python packages with --upgrade so we get new packages even if a system
103 | # package is already installed. Combine with --force-reinstall to ensure we get
104 | # a local package even if the system package is up-to-date as the system package
105 | # will probably be uninstalled with the build dependencies.
106 | && pip3 install --no-cache --upgrade --force-reinstall -r /marathon-lb/requirements.txt \
107 | \
108 | && apt-get purge -y --auto-remove $buildDeps
109 |
110 | COPY . /marathon-lb
111 |
112 | WORKDIR /marathon-lb
113 |
114 | ENTRYPOINT [ "tini", "-g", "--", "/marathon-lb/run" ]
115 | CMD [ "sse", "--health-check", "--group", "external" ]
116 |
117 | EXPOSE 80 443 9090 9091
118 |
--------------------------------------------------------------------------------
/Jenkinsfile:
--------------------------------------------------------------------------------
1 | @Library('libpipelines@master') _
2 |
3 | hose {
4 | EMAIL = 'qa'
5 | DEVTIMEOUT = 20
6 | RELEASETIMEOUT = 20
7 | MODULE = 'marathon-lb'
8 | REPOSITORY = 'marathon-lb-sec'
9 | PKGMODULESNAMES = ['marathon-lb-sec']
10 | QA_ISSUE_PROJECT = 'EOS'
11 | BUILDTOOL = 'make'
12 | NEW_VERSIONING = true
13 | GENERATE_QA_ISSUE = true
14 | INSTALLTIMEOUT = 120
15 |
16 | ANCHORE_POLICY = "marathon-lb-sec"
17 |
18 | INSTALLSERVICES = [
19 | ['DCOSCLIHETZNER': ['image': 'stratio/dcos-cli:0.4.15-SNAPSHOT',
20 | 'volumes': [
21 | '\$PEM_FILE_DIR:/tmp'
22 | ],
23 | 'env': ['DCOS_IP=\$DCOS_IP',
24 | 'SSL=true',
25 | 'SSH=true',
26 | 'TOKEN_AUTHENTICATION=true',
27 | 'DCOS_USER=\$DCOS_USER',
28 | 'DCOS_PASSWORD=\$DCOS_PASSWORD',
29 | 'CLI_BOOTSTRAP_USER=\$CLI_BOOTSTRAP_USER',
30 | 'PEM_PATH=/tmp/\${CLI_BOOTSTRAP_USER}_rsa'
31 | ],
32 | 'sleep': 120,
33 | 'healthcheck': 5000
34 | ]
35 | ],
36 | ['DCOSCLIVMWARE': ['image': 'stratio/dcos-cli:0.4.15-SNAPSHOT',
37 | 'volumes': ['stratio/paasintegrationpem:0.1.0'],
38 | 'env': ['DCOS_IP=\$DCOS_IP',
39 | 'SSL=true',
40 | 'SSH=true',
41 | 'TOKEN_AUTHENTICATION=true',
42 | 'DCOS_USER=\$DCOS_USER',
43 | 'DCOS_PASSWORD=\$DCOS_PASSWORD',
44 | 'CLI_BOOTSTRAP_USER=\$CLI_BOOTSTRAP_USER',
45 | 'PEM_PATH=/paascerts/PaasIntegration.pem'
46 | ],
47 | 'sleep': 120,
48 | 'healthcheck': 5000
49 | ]
50 | ]
51 | ]
52 |
53 | ATCREDENTIALS = [[TYPE:'sshKey', ID:'PEM_VMWARE']]
54 |
55 | INSTALLPARAMETERS = """
56 | | -DREMOTE_USER=\$PEM_VMWARE_USER
57 | | -DEOS_VAULT_PORT=8200
58 | | """.stripMargin().stripIndent()
59 |
60 | DEV = { config ->
61 | doDocker(config)
62 | }
63 |
64 | INSTALL = { config, params ->
65 | def ENVIRONMENTMAP = stringToMap(params.ENVIRONMENT)
66 |
67 | def pempathhetzner = ""
68 | pempathhetzner = """${params.ENVIRONMENT}
69 | |PEM_FILE_PATH=\$PEM_VMWARE_PATH
70 | |DCOS_CLI_HOST=%%DCOSCLIHETZNER#0
71 | |""".stripMargin().stripIndent()
72 |
73 | def PATHHETZNER = stringToMap(pempathhetzner)
74 | def PATHHETZNERINSTALL = doReplaceTokens(INSTALLPARAMETERS.replaceAll(/\n/, ''), PATHHETZNER)
75 |
76 | def pempathvmware = ""
77 | pempathvmware = """${params.ENVIRONMENT}
78 | |PEM_FILE_PATH=\$PEM_VMWARE_KEY
79 | |DCOS_CLI_HOST=%%DCOSCLIVMWARE#0
80 | |""".stripMargin().stripIndent()
81 |
82 | def PATHVMWARE = stringToMap(pempathvmware)
83 | def PATHVMWAREINSTALL = doReplaceTokens(INSTALLPARAMETERS.replaceAll(/\n/, ' '), PATHVMWARE)
84 |
85 |
86 | if (config.INSTALLPARAMETERS.contains('GROUPS_MARATHONLB')) {
87 | if (params.ENVIRONMENT.contains('HETZNER_CLUSTER')) {
88 | PATHHETZNERINSTALL = "${PATHHETZNERINSTALL}".replaceAll('-DGROUPS_MARATHONLB', '-Dgroups')
89 | doAT(conf: config, parameters: PATHHETZNERINSTALL, environmentAuth: ENVIRONMENTMAP['HETZNER_CLUSTER'])
90 | } else {
91 | PATHVMWAREINSTALL = "${PATHVMWAREINSTALL}".replaceAll('-DGROUPS_MARATHONLB', '-Dgroups')
92 | doAT(conf: config, parameters: PATHVMWAREINSTALL)
93 | }
94 | } else {
95 | if (params.ENVIRONMENT.contains('HETZNER_CLUSTER')) {
96 | doAT(conf: config, groups: ['nightly'], parameters: PATHHETZNERINSTALL, environmentAuth: ENVIRONMENTMAP['HETZNER_CLUSTER'])
97 | } else {
98 | doAT(conf: config, groups: ['nightly'], parameters: PATHVMWAREINSTALL)
99 | }
100 | }
101 |
102 | }
103 |
104 | }
105 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/050_check_haproxy_host_path/01_EOS_2920_check_multiple_deployments.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD,BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,DCOS_PASSWORD)
3 | Feature: Check multiple deployments which share vhost
4 |
5 | Scenario Outline:[01] Deploy different nginx services
6 | Given I create file '-config.json' based on 'schemas/nginx-qa-config.json' as 'json' with:
7 | | $.id | REPLACE | | string |
8 | | $.labels.HAPROXY_0_VHOST | REPLACE | | string |
9 | | $.labels.HAPROXY_0_PATH | REPLACE | | string |
10 | | $.labels.HAPROXY_0_BACKEND_WEIGHT | REPLACE | | string |
11 | | $.labels.DCOS_PACKAGE_NAME | REPLACE | | string |
12 | | $.labels.DCOS_SERVICE_NAME | REPLACE | | string |
13 | | $.container.docker.image | UPDATE | !{EXTERNAL_DOCKER_REGISTRY}/nginx:1.10.3-alpine | n/a |
14 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
15 | And I outbound copy 'target/test-classes/-config.json' through a ssh connection to '/tmp'
16 | And I run 'dcos marathon app add /tmp/-config.json' in the ssh connection
17 | And I run 'rm -f /tmp/-config.json' in the ssh connection
18 | Examples:
19 | | id | vhost | path | weight |
20 | | nginx-qa-testqa | nginx-qa.!{EOS_DNS_SEARCH} | | 0 |
21 | | nginx-qa-testqa1 | nginx-qa.!{EOS_DNS_SEARCH} | testqa1 | 1 |
22 | | nginx-qa-testqa2 | nginx-qa.!{EOS_DNS_SEARCH} | testqa2 | 2 |
23 | | nginx-qa-testqa3 | nginx-qa.!{EOS_DNS_SEARCH} | testqa3 | 3 |
24 |
25 | Scenario Outline:[02] Check deployment of the different nginx services
26 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
27 | And in less than '100' seconds, checking each '10' seconds, the command output 'dcos task | grep -w '' | awk '{print $4}' | grep R | wc -l' contains '1' with exit status '0'
28 | And I run 'dcos marathon task list | grep -w /'' | grep True | awk '{print $5}'' in the ssh connection with exit status '0' and save the value in environment variable 'nginxTaskId'
29 | And in less than '100' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginxTaskId} | jq -c 'select(.state=="TASK_RUNNING" and .healthCheckResults[].alive==true)' | wc -l' contains '1' with exit status '0'
30 | Examples:
31 | | id |
32 | | nginx-qa-testqa |
33 | | nginx-qa-testqa1 |
34 | | nginx-qa-testqa2 |
35 | | nginx-qa-testqa3 |
36 |
37 | Scenario:[03] Check rules in MarathonLB
38 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
39 | And I run 'curl -XGET http://!{PUBLIC_NODE}:9090/_haproxy_getconfig' in the ssh connection with exit status '0' and save the value in environment variable 'haproxy_getConfig'
40 | And I run 'echo '!{haproxy_getConfig}' | grep -A12 'frontend marathon_http_in' > /tmp/rules.txt' locally
41 | And I run 'echo !{EOS_DNS_SEARCH} | sed 's/\./\_/g'' locally and save the value in environment variable 'dnsSearchParsed'
42 | And I run 'sed -i -e 's/_nginx-qa_labs_stratio_com/_nginx-qa_!{dnsSearchParsed}/g' -e 's/nginx-qa.labs.stratio.com/nginx-qa.!{EOS_DNS_SEARCH}/g' target/test-classes/schemas/marathonlb_http_rules.txt' locally
43 | And I run 'sed -i 's/nginx-qa.labs.stratio.com/nginx-qa.!{EOS_DNS_SEARCH}/g' target/test-classes/schemas/marathonlb_https_rules.txt' locally
44 | And I run 'diff target/test-classes/schemas/marathonlb_http_rules.txt /tmp/rules.txt' locally with exit status '0'
45 | And I run 'echo '!{haproxy_getConfig}' | grep -A9 'frontend marathon_https_in' > /tmp/rules.txt' locally
46 | And I run 'diff target/test-classes/schemas/marathonlb_https_rules.txt /tmp/rules.txt' locally with exit status '0'
47 | And I run 'rm -f /tmp/rules.txt' locally
48 |
49 | Scenario Outline:[04] Remove the different nginx services
50 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
51 | And I run 'dcos marathon app remove ' in the ssh connection
52 | And I run 'dcos task | grep -w '' | awk '{print $4}' | grep R' in the ssh connection with exit status '1'
53 | Examples:
54 | | id |
55 | | nginx-qa-testqa |
56 | | nginx-qa-testqa1 |
57 | | nginx-qa-testqa2 |
58 | | nginx-qa-testqa3 |
59 |
--------------------------------------------------------------------------------
/kms_utils.py:
--------------------------------------------------------------------------------
1 | import common
2 | import json
3 | import logging
4 | import os
5 | import subprocess
6 | import time
7 |
8 | from dateutil import parser
9 |
10 | head_vault_hosts = 'OLD_IFS=${IFS};IFS=\',\' read -r -a VAULT_HOSTS <<< \"$STRING_VAULT_HOST\";IFS=${OLD_IFS};'
11 | source_kms_utils = '. /usr/sbin/kms_utils.sh;'
12 |
13 | global vault_token
14 | global vault_accessor
15 | global MIN_PERCENTAGE_EXPIRATION
16 |
17 | vault_token = os.getenv('VAULT_TOKEN', '')
18 | vault_accessor = os.getenv('ACCESSOR_TOKEN','')
19 | MIN_PERCENTAGE_EXPIRATION = 0.2
20 |
21 | logger = None
22 | def init_log():
23 | global logger
24 | logger = common.marathon_lb_logger.getChild('kms_utils.py')
25 |
26 | def login():
27 | global vault_token
28 | global vault_accessor
29 | resp,_ = exec_with_kms_utils('', 'login', 'echo "{\\\"vaulttoken\\\": \\\"$VAULT_TOKEN\\\",\\\"accessor\\\": \\\"$ACCESSOR_TOKEN\\\"}"')
30 | jsonVal = json.loads(resp.decode("utf-8"))
31 | vault_accessor = (jsonVal['accessor'])
32 | vault_token = (jsonVal['vaulttoken'])
33 |
34 | def get_cert(cluster, instance, fqdn, o_format, store_path):
35 | variables = ''.join(['export VAULT_TOKEN=', vault_token, ';'])
36 | command = ' '.join(['getCert', cluster, instance, fqdn, o_format, store_path])
37 | resp,returncode = exec_with_kms_utils(variables, command , '')
38 | logger.debug('get_cert for ' + instance + ' returned ' + str(returncode) + ' and ' + resp.decode("utf-8"))
39 |
40 | return returncode == 0
41 |
42 | def get_token_info():
43 | variables = ''.join(['export VAULT_TOKEN=', vault_token, ';', 'export ACCESSOR_TOKEN=', vault_accessor, ';'])
44 | command = 'token_info'
45 | resp,_ = exec_with_kms_utils(variables, command, '')
46 | respArr = resp.decode("utf-8").split(',')
47 | jsonValue = json.loads(','.join(respArr[1:]))
48 | logger.debug('status ' + respArr[0])
49 | logger.debug(jsonValue)
50 |
51 | return jsonValue
52 |
53 | def check_token_needs_renewal(force):
54 | jsonInfo = get_token_info()
55 | creationTime = jsonInfo['data']['creation_time']
56 |
57 | #Convert time as given from Vault to epoch time
58 | expire_time_vault = jsonInfo['data']['expire_time']
59 | expire_time = int(parser.parse(expire_time_vault).timestamp())
60 |
61 | ttl = jsonInfo['data']['ttl']
62 |
63 | lastRenewalTime = 0
64 | try:
65 | lastRenewalTime = jsonInfo['data']['last_renewal_time']
66 | except KeyError: pass
67 |
68 | if (lastRenewalTime > 0):
69 | percentage = ttl / (expire_time - lastRenewalTime)
70 | else:
71 | percentage = ttl / (expire_time - creationTime)
72 |
73 | logger.debug('Checked token expiration: percentage -> ' + str(percentage))
74 |
75 | if (percentage <= MIN_PERCENTAGE_EXPIRATION and percentage > 0):
76 | logger.info('Token about to expire... needs renewal')
77 | jsonInfo = renewal_token()
78 | lease_duration_vault = jsonInfo['auth']['lease_duration']
79 | expire_time = int(time.time()) + int(lease_duration_vault)
80 |
81 | elif (percentage <= 0):
82 | logger.info('Token expired!!')
83 | return False
84 | elif force:
85 | logger.info('Forced renewal')
86 | jsonInfo = renewal_token()
87 | lease_duration_vault = jsonInfo['auth']['lease_duration']
88 | expire_time = int(time.time()) + int(lease_duration_vault)
89 |
90 | #Write expire_time to file
91 | with open('/marathon-lb/token-status', 'w') as fd:
92 | fd.write(str(int(expire_time)))
93 |
94 | return True
95 |
96 | def renewal_token():
97 | variables = ''.join(['export VAULT_TOKEN=', vault_token, ';'])
98 | command = 'token_renewal'
99 | resp,_ = exec_with_kms_utils(variables, command, '')
100 | respArr = resp.decode("utf-8").split(',')
101 | # Due to kms_utils.sh issue, response could contain a spurious status_code as follows
102 | #
103 | # 000{request_response}
104 | #
105 | # This 000 spurious status code is caused by an empty parameter set by kms_utils.sh
106 | # which results in an additional curl to an empty URL.
107 | #
108 | # As fixing kms_utils.sh could generate strong side effects, we need to strip this
109 | # spurious response code from the request response here
110 | spurious_status_code = '000'
111 | if respArr[1].startswith(spurious_status_code):
112 | respArr[1] = respArr[1][len(spurious_status_code):]
113 | jsonValue = json.loads(','.join(respArr[1:]))
114 | logger.debug('status ' + respArr[0])
115 | logger.debug(jsonValue)
116 |
117 | return jsonValue
118 |
119 | def exec_with_kms_utils(variables, command, extra_command):
120 | logger.debug('>>> exec_with_kms_utils: [COMM:'+command+', VARS:'+variables+', EXTRA_COMM:'+extra_command+']')
121 | proc = subprocess.Popen(['bash', '-c', head_vault_hosts + variables + source_kms_utils + command + ';' + extra_command], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
122 |
123 | try:
124 | resp,_ = proc.communicate(timeout=10)
125 | except subprocess.TimeoutExpired as e:
126 | proc.kill()
127 | raise e
128 |
129 | return resp, proc.returncode
130 |
131 |
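
As a worked example of the renewal threshold used by check_token_needs_renewal above (the timestamps are illustrative, not taken from a real Vault response):

```python
# Illustrative numbers only: a 3600 s lease with 600 s left to live.
creation_time = 1_600_000_000          # data.creation_time from Vault
expire_time   = creation_time + 3600   # parsed from data.expire_time
ttl           = 600                    # data.ttl (seconds remaining)

percentage = ttl / (expire_time - creation_time)   # 600 / 3600 ~= 0.17
assert 0 < percentage <= 0.2           # at or below MIN_PERCENTAGE_EXPIRATION -> renew
```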
--------------------------------------------------------------------------------
/testsAT/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 |
8 | 0.18.0
9 | UTF-8
10 | UTF-8
11 | 4.1.1
12 |
13 |
14 |
15 | com.stratio
16 | parent
17 | 0.11.0
18 |
19 |
20 | com.stratio.paas
21 | marathon-lb-sec-AT
22 | 0.6.0-SNAPSHOT
23 | marathon-lb-sec-AT
24 |
25 |
26 |
27 |
28 | org.apache.maven.plugins
29 | maven-compiler-plugin
30 |
31 | 1.7
32 | 1.7
33 |
34 |
35 |
36 | org.apache.maven.plugins
37 | maven-surefire-plugin
38 |
39 | true
40 | true
41 |
42 |
43 |
44 | net.masterthought
45 | maven-cucumber-reporting
46 | ${cucumber-reporting.version}
47 |
48 |
49 | execution
50 | verify
51 |
52 | generate
53 |
54 |
55 | marathon-lb-sec-AT
56 | ${project.build.directory}
57 | ${project.build.directory}
58 |
59 | cucumber.json
60 |
61 | ${project.build.directory}/
62 |
63 | notused.properties
64 |
65 | false
66 |
67 |
68 |
69 |
70 |
71 | org.apache.maven.plugins
72 | maven-failsafe-plugin
73 |
74 |
75 |
76 | usedefaultlisteners
77 | false
78 |
79 |
80 | listener
81 | com.stratio.qa.utils.JaCoCoClient
82 |
83 |
84 | -Xmx1024m -XX:+HeapDumpOnOutOfMemoryError
85 | -javaagent:${settings.localRepository}/org/aspectj/aspectjweaver/1.8.8/aspectjweaver-1.8.8.jar
86 | true
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 | com.stratio.qa
95 | bdt
96 | ${stratio-test-bdd.version}
97 |
98 |
99 | org.apache.logging.log4j
100 | log4j-slf4j-impl
101 | 2.0
102 |
103 |
104 | org.apache.logging.log4j
105 | log4j-core
106 | 2.0
107 |
108 |
109 | org.apache.logging.log4j
110 | log4j-1.2-api
111 | 2.0
112 |
113 |
114 | org.testng
115 | testng
116 | 6.9.9
117 |
118 |
119 |
120 |
121 |
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/020_Certificates/01_MARATHONLB_1386_AppCertificate.feature:
--------------------------------------------------------------------------------
1 | @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,EOS_INSTALLER_VERSION,DCOS_PASSWORD,DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD)
3 | Feature:[MARATHONLB-1386] Deploying marathon-lb-sec with an nginx certificate
4 |
5 | @runOnEnv(EOS_INSTALLER_VERSION<0.22.11)
6 | Scenario:[01a] Preparing files
7 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
8 | And I outbound copy 'src/test/resources/scripts/marathon-lb-app-certs.sh' through a ssh connection to '/tmp'
9 | And I run 'cp /stratio_volume/certs.list certs_custom_app_marathonlb.list' in the ssh connection
10 |
11 | @runOnEnv(EOS_INSTALLER_VERSION=0.22.11||EOS_INSTALLER_VERSION>0.22.11)
12 | Scenario:[01b] Preparing files
13 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
14 | And I run 'sudo touch /stratio_volume/certs.list' in the ssh connection
15 | And I outbound copy 'src/test/resources/scripts/marathon-lb-app-certs.sh' through a ssh connection to '/tmp'
16 | And I run 'sed -i s/"DNS:nginx-qa.labs.stratio.com"/"DNS:nginx-qa.!{EOS_DNS_SEARCH}"/g /tmp/marathon-lb-app-certs.sh' in the ssh connection
17 | And I run 'cp /stratio_volume/certs.list certs_custom_app_marathonlb.list' in the ssh connection
18 |
19 | Scenario:[02] Deploying marathon-lb-sec with an nginx certificate
20 | Given I run 'sudo cp /etc/hosts /tmp/hostbackup' locally
21 | And I run 'cat /etc/hosts | grep nginx-qa.!{EOS_DNS_SEARCH} || echo "!{PUBLIC_NODE} nginx-qa.!{EOS_DNS_SEARCH}" | sudo tee -a /etc/hosts' locally
22 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
23 | And I run 'cd /tmp && sudo chmod +x marathon-lb-app-certs.sh' in the ssh connection
24 | And I run 'sudo mv /tmp/marathon-lb-app-certs.sh /stratio_volume/marathon-lb-app-certs.sh' in the ssh connection
25 | And I run 'sudo docker ps | grep eos-installer | awk '{print $1}'' in the ssh connection and save the value in environment variable 'containerId'
26 | And I run 'sudo docker exec -t !{containerId} /stratio_volume/marathon-lb-app-certs.sh' in the ssh connection
27 | And I wait '60' seconds
28 | And I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
29 | And I outbound copy 'src/test/resources/schemas/nginx-qa-config.json' through a ssh connection to '/tmp'
30 | And I run 'sed -i -e 's/qa.stratio.com/!{EXTERNAL_DOCKER_REGISTRY}/g' -e '/"HAPROXY_0_PATH": null,/d' -e 's/"HAPROXY_0_VHOST": "nginx-qa.labs.stratio.com"/"HAPROXY_0_VHOST": "nginx-qa.!{EOS_DNS_SEARCH}"/g' /tmp/nginx-qa-config.json ; dcos marathon app add /tmp/nginx-qa-config.json ; rm -f /tmp/nginx-qa-config.json' in the ssh connection
31 | Then in less than '300' seconds, checking each '20' seconds, the command output 'dcos task | grep nginx-qa | grep R | wc -l' contains '1'
32 | When I run 'dcos task | grep marathonlb | tail -1 | awk '{print $5}'' in the ssh connection and save the value in environment variable 'TaskID'
33 | And I run 'dcos marathon task list nginx-qa | awk '{print $5}' | grep nginx-qa' in the ssh connection and save the value in environment variable 'nginx-qaTaskId'
34 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep TASK_RUNNING | wc -l' contains '1'
35 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep healthCheckResults | wc -l' contains '1'
36 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep '"alive": true' | wc -l' contains '1'
37 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos task log --lines 500 !{TaskID} 2>/dev/null | grep 'Downloaded certificate nginx-qa.pem' | wc -l' contains '1'
38 | Given I run 'dcos marathon app remove --force nginx-qa' in the ssh connection
39 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos task | awk '{print $1}' | grep -c nginx-qa' contains '0'
40 | And in less than '300' seconds, checking each '10' seconds, the command output 'dcos task log --lines 500 !{TaskID} 2>/dev/null | grep 'Deleted certificate nginx-qa.pem' | wc -l' contains '1'
41 | And I run 'sudo cp /tmp/hostbackup /etc/hosts' locally
42 |
43 | @runOnEnv(EOS_INSTALLER_VERSION<0.22.11)
44 | Scenario:[03a] Deleting files
45 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
46 | And I run 'sudo rm -rf /stratio_volume/certs_custom_app_marathonlb.list ; sudo rm -rf /stratio_volume/marathon-lb-app-certs.sh' in the ssh connection
47 |
48 | @runOnEnv(EOS_INSTALLER_VERSION=0.22.11||EOS_INSTALLER_VERSION>0.22.11)
49 | Scenario:[03b] Deleting files
50 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
51 | And I run 'sudo rm -rf /stratio_volume/certs.list ; sudo rm -rf /stratio_volume/certs_custom_app_marathonlb.list ; sudo rm -rf /stratio_volume/marathon-lb-app-certs.sh' in the ssh connection
--------------------------------------------------------------------------------
/testsAT/src/test/resources/features/functionalAT/020_Certificates/03_QATM_2113_Check_Invalid_AppCertificate.feature:
--------------------------------------------------------------------------------
1 | @rest @dcos
2 | @mandatory(BOOTSTRAP_IP,REMOTE_USER,PEM_FILE_PATH,EOS_INSTALLER_VERSION,DCOS_PASSWORD,DCOS_CLI_HOST,DCOS_CLI_USER,DCOS_CLI_PASSWORD)
3 | Feature:[QATM-2113] Download certificates only for newly deployed apps - Invalid certs for nginx-qa
4 |
5 | @runOnEnv(EOS_INSTALLER_VERSION<0.22.11)
6 | Scenario:[01a] Preparing files
7 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
8 | Then I outbound copy 'src/test/resources/scripts/nginx-qa-invalid-certs.sh' through a ssh connection to '/tmp'
9 | And I run 'cp /stratio_volume/certs.list certs_custom_app_nginxqa.list' in the ssh connection
10 |
11 | @runOnEnv(EOS_INSTALLER_VERSION=0.22.11||EOS_INSTALLER_VERSION>0.22.11)
12 | Scenario:[01b] Preparing files
13 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
14 | And I run 'sudo touch /stratio_volume/certs.list' in the ssh connection
15 | Then I outbound copy 'src/test/resources/scripts/nginx-qa-invalid-certs.sh' through a ssh connection to '/tmp'
16 | And I run 'cp /stratio_volume/certs.list certs_custom_app_nginxqa.list' in the ssh connection
17 |
18 | Scenario:[02] Delete valid nginx-qa certificate in vcli
19 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
20 | And I run 'cd /tmp && sudo chmod +x nginx-qa-invalid-certs.sh' in the ssh connection
21 | And I run 'sudo mv /tmp/nginx-qa-invalid-certs.sh /stratio_volume/nginx-qa-invalid-certs.sh' in the ssh connection
22 | And I run 'sudo docker exec -t paas-bootstrap /stratio_volume/nginx-qa-invalid-certs.sh' in the ssh connection
23 |
24 | Scenario:[03] Deploying nginx-qa without valid certificate
25 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
26 | And I outbound copy 'src/test/resources/schemas/nginx-qa-config.json' through a ssh connection to '/tmp'
27 | And I run 'sed -i -e 's/qa.stratio.com/!{EXTERNAL_DOCKER_REGISTRY}/g' -e '/"HAPROXY_0_PATH": null,/d' /tmp/nginx-qa-config.json ; dcos marathon app add /tmp/nginx-qa-config.json ; rm -f /tmp/nginx-qa-config.json' in the ssh connection
28 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos task | grep nginx-qa | grep R | wc -l' contains '1'
29 | When I run 'dcos task | grep marathon.*lb.* | tail -1 | awk '{print $5}'' in the ssh connection and save the value in environment variable 'TaskID'
30 | And I run 'dcos marathon task list nginx-qa | awk '{print $5}' | grep nginx-qa' in the ssh connection and save the value in environment variable 'nginx-qaTaskId'
31 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep TASK_RUNNING | wc -l' contains '1'
32 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep healthCheckResults | wc -l' contains '1'
33 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos marathon task show !{nginx-qaTaskId} | grep '"alive": true' | wc -l' contains '1'
34 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos task log --lines 500 !{TaskID} 2>/dev/null | grep 'Does not exists certificate for /nginx-qa' | wc -l' contains '1'
35 |
36 | Scenario:[04] Uninstall nginx-qa
37 | Given I open a ssh connection to '${DCOS_CLI_HOST}' with user '${DCOS_CLI_USER}' and password '${DCOS_CLI_PASSWORD}'
38 | Given I run 'dcos marathon app remove --force nginx-qa' in the ssh connection
39 | Then in less than '300' seconds, checking each '10' seconds, the command output 'dcos task | awk '{print $1}' | grep -c nginx-qa' contains '0'
40 |
41 | Scenario:[05] Restore Certificates for nginx-qa and remove files
42 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
43 | Then I outbound copy 'src/test/resources/scripts/nginx-qa-restore-certs.sh' through a ssh connection to '/tmp'
44 | And I run 'sed -i s/"DNS:nginx-qa.labs.stratio.com"/"DNS:nginx-qa.!{EOS_DNS_SEARCH}"/g /tmp/nginx-qa-restore-certs.sh' in the ssh connection
45 | And I run 'cp /stratio_volume/certs.list certs_custom_app_nginxqa.list' in the ssh connection
46 | And I run 'cd /tmp && sudo chmod +x nginx-qa-restore-certs.sh' in the ssh connection
47 | And I run 'sudo mv /tmp/nginx-qa-restore-certs.sh /stratio_volume/nginx-qa-restore-certs.sh' in the ssh connection
48 | And I run 'sudo docker exec -t paas-bootstrap /stratio_volume/nginx-qa-restore-certs.sh' in the ssh connection
49 | And I run 'sudo rm -rf /stratio_volume/nginx-qa-cert-backup.json ; sudo rm -rf /stratio_volume/certs_restore_nginx-qa.list ; sudo rm -rf /stratio_volume/nginx-qa-restore-certs.sh ; sudo rm -rf /tmp/nginx-qa-restore-certs.sh' in the ssh connection
50 |
51 | @runOnEnv(EOS_INSTALLER_VERSION=0.22.11||EOS_INSTALLER_VERSION>0.22.11)
52 | Scenario:[06] Remove files
53 | Then I open a ssh connection to '${BOOTSTRAP_IP}' in port '${EOS_NEW_SSH_PORT:-22}' with user '${REMOTE_USER}' using pem file '${PEM_FILE_PATH}'
54 | And I run 'sudo rm -rf /stratio_volume/certs.list ; sudo rm -rf /stratio_volume/nginx-qa-invalid-certs.sh ' in the ssh connection
--------------------------------------------------------------------------------
/tests/haproxy_stats.csv:
--------------------------------------------------------------------------------
1 | # pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,comp_in,comp_out,comp_byp,comp_rsp,lastsess,last_chk,last_agt,qtime,ctime,rtime,ttime,
2 | http-in,FRONTEND,,,1,100,100,1956342,548244383,42792606510,11265,0,22992,,,,,OPEN,,,,,,,,,1,2,0,,,,0,1,0,132,,,,0,1487508,513463,121619,56028,10152,,1,207,2178698,,,26454652356,12904138260,50936,460937,,,,,,,,
3 | http-in,IPv4-direct,,,1,98,100,534002,87052338,1878877542,10928,0,2,,,,,OPEN,,,,,,,,,1,2,1,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
4 | http-out,IPv4-cached,,,0,32,100,996580,350233992,35609409013,72,0,0,,,,,OPEN,,,,,,,,,1,2,2,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
5 | http-in,10_0_6_25_16916,,,0,30,100,325347,100108803,5248781681,57,0,22990,,,,,MAINT,,,,,,,,,1,2,3,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
6 | http-in,10_0_6_25_31184,,,0,30,100,325347,100108803,5248781681,57,0,22990,,,,,UP,,,,,,,,,1,2,3,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
7 | http-in,10_0_6_25_23336,,,0,30,100,325347,100108803,5248781681,57,0,22990,,,,,MAINT,,,,,,,,,1,2,3,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
8 | foobar_8080,local,,,0,0,100,0,0,0,0,0,0,,,,,OPEN,,,,,,,,,1,2,4,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
9 | http-in,local-https,,,0,5,100,105433,10849250,55538274,208,0,0,,,,,OPEN,,,,,,,,,1,2,5,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
10 | www,www,0,0,0,20,20,1343634,440969164,41418239264,,0,,0,163,27,0,UP,1,1,0,15,2,1942980,243,,1,3,1,,1342257,,2,0,,207,L7OK,200,3,19,1060778,131614,75605,4,0,0,,,,233543,663,,,,,3,OK,,0,1,16,1126,
11 | www,bck,0,0,0,3,10,73,26857,1961294,,0,,0,0,0,0,UP,1,0,1,2,1,2949913,6,,1,3,2,,71,,2,0,,11,L7OK,200,2,0,29,13,31,0,0,0,,,,2,0,,,,,1942983,OK,,33,1,3,289,
12 | www,BACKEND,0,37,0,58,100,1345945,442029652,41420675530,58,0,,12,163,27,0,UP,1,1,1,,1,2949913,6,,1,3,0,,1342328,,1,0,,207,,,,0,1060807,131627,97412,56023,74,,,,,233566,663,26041561563,12793472977,50936,445329,3,,,0,1,16,1126,
13 | git,www,0,0,0,2,2,9388,4296645,114593316,,0,,0,0,1,0,UP,1,1,0,14,2,1942980,245,,1,4,1,,7921,,2,0,,2,L7OK,200,3,0,3565,5721,101,0,0,0,,,,208,1,,,,,267,OK,,1237,1,2460,4133,
14 | git,bck,0,0,0,0,2,0,0,0,,0,,0,0,0,0,UP,1,0,1,2,0,2949920,0,,1,4,2,,0,,2,0,,0,L7OK,200,2,0,0,0,0,0,0,0,,,,0,0,,,,,-1,OK,,0,0,0,0,
15 | git,BACKEND,0,8,0,12,2,9387,5117860,114593316,0,0,,0,0,1,0,UP,1,1,1,,0,2949920,0,,1,4,0,,7921,,1,0,,6,,,,0,3565,5721,101,0,0,,,,,208,1,62048839,19144902,0,2647,267,,,1237,1,2460,4133,
16 | demo,BACKEND,0,0,1,94,20,227681,27090709,1170845273,0,0,,5,0,0,0,UP,0,0,0,,0,2949920,0,,1,17,0,,0,,1,1,,40,,,,0,227392,280,0,5,3,,,,,45,0,351041954,91520381,0,12961,0,,,2,0,0,44,
17 | # pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,comp_in,comp_out,comp_byp,comp_rsp,lastsess,last_chk,last_agt,qtime,ctime,rtime,ttime,
18 | http-in,FRONTEND,,,1,100,100,1956342,548244383,42792606510,11265,0,22992,,,,,OPEN,,,,,,,,,1,2,0,,,,0,1,0,132,,,,0,1487508,513463,121619,56028,10152,,1,207,2178698,,,26454652356,12904138260,50936,460937,,,,,,,,
19 | http-in,IPv4-direct,,,1,98,100,534002,87052338,1878877542,10928,0,2,,,,,OPEN,,,,,,,,,1,2,1,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
20 | http-in,IPv4-cached,,,0,32,100,996580,350233992,35609409013,72,0,0,,,,,OPEN,,,,,,,,,1,2,2,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
21 | http-in,IPv6-direct,,,0,30,100,325347,100108803,5248781681,57,0,22990,,,,,OPEN,,,,,,,,,1,2,3,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
22 | http-in,local,,,0,0,100,0,0,0,0,0,0,,,,,OPEN,,,,,,,,,1,2,4,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
23 | http-in,local-https,,,0,5,100,105433,10849250,55538274,208,0,0,,,,,OPEN,,,,,,,,,1,2,5,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
24 | www,www,0,0,0,20,20,1343634,440969164,41418239264,,0,,0,163,27,0,UP,1,1,0,15,2,1942980,243,,1,3,1,,1342257,,2,0,,207,L7OK,200,3,19,1060778,131614,75605,4,0,0,,,,233543,663,,,,,3,OK,,0,1,16,1126,
25 | www,bck,0,0,0,3,10,73,26857,1961294,,0,,0,0,0,0,UP,1,0,1,2,1,2949913,6,,1,3,2,,71,,2,0,,11,L7OK,200,2,0,29,13,31,0,0,0,,,,2,0,,,,,1942983,OK,,33,1,3,289,
26 | www,BACKEND,0,37,0,58,100,1345945,442029652,41420675530,58,0,,12,163,27,0,UP,1,1,1,,1,2949913,6,,1,3,0,,1342328,,1,0,,207,,,,0,1060807,131627,97412,56023,74,,,,,233566,663,26041561563,12793472977,50936,445329,3,,,0,1,16,1126,
27 | git,www,0,0,0,2,2,9388,4296645,114593316,,0,,0,0,1,0,UP,1,1,0,14,2,1942980,245,,1,4,1,,7921,,2,0,,2,L7OK,200,3,0,3565,5721,101,0,0,0,,,,208,1,,,,,267,OK,,1237,1,2460,4133,
28 | git,bck,0,0,0,0,2,0,0,0,,0,,0,0,0,0,UP,1,0,1,2,0,2949920,0,,1,4,2,,0,,2,0,,0,L7OK,200,2,0,0,0,0,0,0,0,,,,0,0,,,,,-1,OK,,0,0,0,0,
29 | git,BACKEND,0,8,0,12,2,9387,5117860,114593316,0,0,,0,0,1,0,UP,1,1,1,,0,2949920,0,,1,4,0,,7921,,1,0,,6,,,,0,3565,5721,101,0,0,,,,,208,1,62048839,19144902,0,2647,267,,,1237,1,2460,4133,
30 | demo,BACKEND,0,0,1,94,20,227681,27090709,1170845273,0,0,,5,0,0,0,UP,0,0,0,,0,2949920,0,,1,17,0,,0,,1,1,,40,,,,0,227392,280,0,5,3,,,,,45,0,351041954,91520381,0,12961,0,,,2,0,0,44,
31 | http-in,10_0_6_25_31184,,,0,30,100,325347,100108803,5248781681,57,0,22990,,,,,MAINT,,,,,,,,,1,2,3,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
32 | http-in,10_0_6_25_16916,,,0,30,100,325347,100108803,5248781681,57,0,22990,,,,,MAINT,,,,,,,,,1,2,3,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
33 | http-in,10_0_6_25_23336,1,2,0,30,100,325347,100108803,5248781681,57,0,22990,,,,,MAINT,,,,,,,,,1,2,3,,,,3,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
34 |
--------------------------------------------------------------------------------
/tests/zdd_app_blue.json:
--------------------------------------------------------------------------------
1 | {
2 | "apps":[
3 | {
4 | "id":"/nginx-green",
5 | "cmd":null,
6 | "args":null,
7 | "user":null,
8 | "env":{
9 |
10 | },
11 | "instances":3,
12 | "cpus":0.1,
13 | "mem":65,
14 | "disk":0,
15 | "executor":"",
16 | "constraints":[
17 |
18 | ],
19 | "uris":[
20 |
21 | ],
22 | "storeUrls":[
23 |
24 | ],
25 | "ports":[
26 | 10000
27 | ],
28 | "requirePorts":false,
29 | "backoffSeconds":1,
30 | "backoffFactor":1.15,
31 | "maxLaunchDelaySeconds":3600,
32 | "container":{
33 | "type":"DOCKER",
34 | "volumes":[
35 |
36 | ],
37 | "docker":{
38 | "image":"brndnmtthws/nginx-echo-sleep",
39 | "network":"BRIDGE",
40 | "portMappings":[
41 | {
42 | "containerPort":8080,
43 | "hostPort":0,
44 | "servicePort":10000,
45 | "protocol":"tcp"
46 | }
47 | ],
48 | "privileged":false,
49 | "parameters":[
50 |
51 | ],
52 | "forcePullImage":true
53 | }
54 | },
55 | "healthChecks":[
56 | {
57 | "path":"/",
58 | "protocol":"HTTP",
59 | "portIndex":0,
60 | "gracePeriodSeconds":15,
61 | "intervalSeconds":3,
62 | "timeoutSeconds":15,
63 | "maxConsecutiveFailures":10
64 | }
65 | ],
66 | "dependencies":[
67 |
68 | ],
69 | "upgradeStrategy":{
70 | "minimumHealthCapacity":1,
71 | "maximumOverCapacity":1
72 | },
73 | "labels":{
74 | "HAPROXY_DEPLOYMENT_GROUP":"nginx",
75 | "HAPROXY_GROUP":"external",
76 | "HAPROXY_DEPLOYMENT_COLOUR":"green",
77 | "HAPROXY_DEPLOYMENT_TARGET_INSTANCES":"20",
78 | "HAPROXY_DEPLOYMENT_STARTED_AT":"2016-02-01T14:13:42.499089",
79 | "HAPROXY_DEPLOYMENT_ALT_PORT":"10001",
80 | "HAPROXY_0_PORT":"10000",
81 | "HAPROXY_APP_ID":"nginx"
82 | },
83 | "acceptedResourceRoles":[
84 | "*",
85 | "slave_public"
86 | ],
87 | "ipAddress":null,
88 | "version":"2016-02-01T22:57:48.784Z",
89 | "versionInfo":{
90 | "lastScalingAt":"2016-02-01T22:57:48.784Z",
91 | "lastConfigChangeAt":"2016-02-01T22:13:42.538Z"
92 | },
93 | "tasksStaged":0,
94 | "tasksRunning":3,
95 | "tasksHealthy":3,
96 | "tasksUnhealthy":0,
97 | "deployments":[
98 |
99 | ],
100 | "tasks":[
101 | {
102 | "id":"nginx-green.1883a887-c931-11e5-9836-026ee08d9f6f",
103 | "host":"10.0.6.25",
104 | "ipAddresses":[
105 | {
106 | "ipAddress":"172.17.1.72",
107 | "protocol":"IPv4"
108 | }
109 | ],
110 | "ports":[
111 | 16916
112 | ],
113 | "startedAt":"2016-02-01T22:14:03.432Z",
114 | "stagedAt":"2016-02-01T22:14:00.653Z",
115 | "version":"2016-02-01T22:14:00.625Z",
116 | "slaveId":"1f09399d-0cb0-4484-afd7-f08e99d2acbb-S1",
117 | "appId":"/nginx-green",
118 | "healthCheckResults":[
119 | {
120 | "alive":true,
121 | "consecutiveFailures":0,
122 | "firstSuccess":"2016-02-01T22:14:08.666Z",
123 | "lastFailure":null,
124 | "lastSuccess":"2016-02-01T23:10:30.986Z",
125 | "taskId":"nginx-green.1883a887-c931-11e5-9836-026ee08d9f6f"
126 | }
127 | ]
128 | },
129 | {
130 | "id":"nginx-green.0dbabba6-c931-11e5-9836-026ee08d9f6f",
131 | "host":"10.0.6.25",
132 | "ipAddresses":[
133 | {
134 | "ipAddress":"172.17.1.71",
135 | "protocol":"IPv4"
136 | }
137 | ],
138 | "ports":[
139 | 31184
140 | ],
141 | "startedAt":"2016-02-01T22:13:44.850Z",
142 | "stagedAt":"2016-02-01T22:13:42.559Z",
143 | "version":"2016-02-01T22:13:42.538Z",
144 | "slaveId":"1f09399d-0cb0-4484-afd7-f08e99d2acbb-S1",
145 | "appId":"/nginx-green",
146 | "healthCheckResults":[
147 | {
148 | "alive":true,
149 | "consecutiveFailures":0,
150 | "firstSuccess":"2016-02-01T22:13:50.565Z",
151 | "lastFailure":null,
152 | "lastSuccess":"2016-02-01T23:10:31.005Z",
153 | "taskId":"nginx-green.0dbabba6-c931-11e5-9836-026ee08d9f6f"
154 | }
155 | ]
156 | },
157 | {
158 | "id":"nginx-green.23532518-c931-11e5-9836-026ee08d9f6f",
159 | "host":"10.0.6.25",
160 | "ports":[
161 | 23336
162 | ],
163 | "startedAt":"2016-02-01T22:14:21.314Z",
164 | "stagedAt":"2016-02-01T22:14:18.790Z",
165 | "version":"2016-02-01T22:14:18.768Z",
166 | "slaveId":"1f09399d-0cb0-4484-afd7-f08e99d2acbb-S1",
167 | "appId":"/nginx-green",
168 | "healthCheckResults":[
169 | {
170 | "alive":true,
171 | "consecutiveFailures":0,
172 | "firstSuccess":"2016-02-01T22:14:26.805Z",
173 | "lastFailure":null,
174 | "lastSuccess":"2016-02-01T23:10:30.986Z",
175 | "taskId":"nginx-green.23532518-c931-11e5-9836-026ee08d9f6f"
176 | }
177 | ]
178 | }
179 | ]
180 | }
181 | ]
182 | }
183 |
--------------------------------------------------------------------------------
/tests/zdd_app_blue_marathon1.5.json:
--------------------------------------------------------------------------------
1 | {
2 | "apps": [
3 | {
4 | "id": "/nginx-green",
5 | "acceptedResourceRoles": [
6 | "*",
7 | "slave_public"
8 | ],
9 | "backoffFactor": 1.15,
10 | "backoffSeconds": 1,
11 | "container": {
12 | "type": "DOCKER",
13 | "docker": {
14 | "forcePullImage": true,
15 | "image": "brndnmtthws/nginx-echo-sleep",
16 | "parameters": [],
17 | "privileged": false
18 | },
19 | "volumes": [],
20 | "portMappings": [
21 | {
22 | "containerPort": 8080,
23 | "hostPort": 0,
24 | "labels": {},
25 | "protocol": "tcp",
26 | "servicePort": 10000
27 | }
28 | ]
29 | },
30 | "cpus": 0.1,
31 | "disk": 0,
32 | "executor": "",
33 | "healthChecks": [
34 | {
35 | "gracePeriodSeconds": 15,
36 | "intervalSeconds": 3,
37 | "maxConsecutiveFailures": 10,
38 | "path": "/",
39 | "portIndex": 0,
40 | "protocol": "MESOS_HTTP",
41 | "timeoutSeconds": 15,
42 | "delaySeconds": 15
43 | }
44 | ],
45 | "instances": 3,
46 | "labels": {
47 | "HAPROXY_DEPLOYMENT_GROUP": "nginx",
48 | "HAPROXY_GROUP": "external",
49 | "HAPROXY_DEPLOYMENT_COLOUR": "green",
50 | "HAPROXY_DEPLOYMENT_TARGET_INSTANCES": "20",
51 | "HAPROXY_DEPLOYMENT_STARTED_AT": "2016-02-01T14:13:42.499089",
52 | "HAPROXY_DEPLOYMENT_ALT_PORT": "10001",
53 | "HAPROXY_0_PORT": "10000",
54 | "HAPROXY_APP_ID": "nginx"
55 | },
56 | "maxLaunchDelaySeconds": 3600,
57 | "mem": 65,
58 | "gpus": 0,
59 | "networks": [
60 | {
61 | "mode": "container/bridge"
62 | }
63 | ],
64 | "requirePorts": false,
65 | "upgradeStrategy": {
66 | "maximumOverCapacity": 1,
67 | "minimumHealthCapacity": 1
68 | },
69 | "version": "2017-07-14T15:13:34.402Z",
70 | "versionInfo": {
71 | "lastScalingAt": "2017-07-14T15:13:34.402Z",
72 | "lastConfigChangeAt": "2017-07-14T15:13:34.402Z"
73 | },
74 | "killSelection": "YOUNGEST_FIRST",
75 | "unreachableStrategy": {
76 | "inactiveAfterSeconds": 300,
77 | "expungeAfterSeconds": 600
78 | },
79 | "tasksStaged": 0,
80 | "tasksRunning": 3,
81 | "tasksHealthy": 3,
82 | "tasksUnhealthy": 0,
83 | "deployments": [],
84 | "tasks": [
85 | {
86 | "ipAddresses": [
87 | {
88 | "ipAddress": "172.17.0.2",
89 | "protocol": "IPv4"
90 | }
91 | ],
92 | "stagedAt": "2017-07-14T15:13:35.804Z",
93 | "state": "TASK_RUNNING",
94 | "ports": [
95 | 16916
96 | ],
97 | "startedAt": "2017-07-14T15:13:58.326Z",
98 | "version": "2017-07-14T15:13:34.402Z",
99 | "id": "nginx-green.01da9a15-68a7-11e7-a229-e6a514ca0c21",
100 | "appId": "/nginx-green",
101 | "slaveId": "c28a2184-c702-482c-91c8-5af9318434d4-S0",
102 | "host": "10.0.6.25",
103 | "healthCheckResults": [
104 | {
105 | "alive": true,
106 | "consecutiveFailures": 0,
107 | "firstSuccess": "2017-07-14T15:14:18.395Z",
108 | "lastFailure": null,
109 | "lastSuccess": "2017-07-14T15:14:18.395Z",
110 | "lastFailureCause": null,
111 | "instanceId": "nginx-green.marathon-01da9a15-68a7-11e7-a229-e6a514ca0c21"
112 | }
113 | ]
114 | },
115 | {
116 | "ipAddresses": [
117 | {
118 | "ipAddress": "172.17.0.2",
119 | "protocol": "IPv4"
120 | }
121 | ],
122 | "stagedAt": "2017-07-14T15:13:35.816Z",
123 | "state": "TASK_RUNNING",
124 | "ports": [
125 | 31184
126 | ],
127 | "startedAt": "2017-07-14T15:13:57.633Z",
128 | "version": "2017-07-14T15:13:34.402Z",
129 | "id": "nginx-green.01dc95e6-68a7-11e7-a229-e6a514ca0c21",
130 | "appId": "/nginx-green",
131 | "slaveId": "c28a2184-c702-482c-91c8-5af9318434d4-S1",
132 | "host": "10.0.6.25",
133 | "healthCheckResults": [
134 | {
135 | "alive": true,
136 | "consecutiveFailures": 0,
137 | "firstSuccess": "2017-07-14T15:14:17.698Z",
138 | "lastFailure": null,
139 | "lastSuccess": "2017-07-14T15:14:17.698Z",
140 | "lastFailureCause": null,
141 | "instanceId": "nginx-green.marathon-01dc95e6-68a7-11e7-a229-e6a514ca0c21"
142 | }
143 | ]
144 | },
145 | {
146 | "ipAddresses": [
147 | {
148 | "ipAddress": "172.17.0.3",
149 | "protocol": "IPv4"
150 | }
151 | ],
152 | "stagedAt": "2017-07-14T15:13:35.760Z",
153 | "state": "TASK_RUNNING",
154 | "ports": [
155 | 23336
156 | ],
157 | "startedAt": "2017-07-14T15:13:57.639Z",
158 | "version": "2017-07-14T15:13:34.402Z",
159 | "id": "nginx-green.01bf22d4-68a7-11e7-a229-e6a514ca0c21",
160 | "appId": "/nginx-green",
161 | "slaveId": "c28a2184-c702-482c-91c8-5af9318434d4-S1",
162 | "host": "10.0.6.25",
163 | "healthCheckResults": [
164 | {
165 | "alive": true,
166 | "consecutiveFailures": 0,
167 | "firstSuccess": "2017-07-14T15:14:17.702Z",
168 | "lastFailure": null,
169 | "lastSuccess": "2017-07-14T15:14:17.702Z",
170 | "lastFailureCause": null,
171 | "instanceId": "nginx-green.marathon-01bf22d4-68a7-11e7-a229-e6a514ca0c21"
172 | }
173 | ]
174 | }
175 | ]
176 | }
177 | ]
178 | }
179 |
--------------------------------------------------------------------------------
/common.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import datetime
4 | import json
5 | import logging
6 | import os
7 | import sys
8 | import time
9 | from logging.handlers import SysLogHandler
10 |
11 | import jwt
12 | import requests
13 | from requests.auth import AuthBase
14 |
15 | PARENT_LOG_NAME = 'marathon-lb'
16 | TIME_ZONE_LOG_FORMAT='%(timezoneiso8601)'
17 |
18 | def init_logger(syslog_socket, log_format, log_level):
19 | global marathon_lb_logger
20 | marathon_lb_logger = get_logger(PARENT_LOG_NAME, syslog_socket, log_format, log_level)
21 |
22 | def get_logger(logger_name, syslog_socket, log_format, log_level='DEBUG'):
23 | if TIME_ZONE_LOG_FORMAT in log_format:
24 | timezone = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()[-6:]
25 | log_format = log_format.replace(TIME_ZONE_LOG_FORMAT,timezone)
26 | logging.basicConfig(format=log_format, datefmt='%Y-%m-%dT%H:%M:%S')
27 |
28 | logger = logging.getLogger(logger_name)
29 |
30 | log_level = log_level.upper()
31 |
32 | if log_level not in ['CRITICAL', 'ERROR', 'WARNING',
33 | 'INFO', 'DEBUG', 'NOTSET']:
34 | raise Exception('Invalid log level: {}'.format(log_level.upper()))
35 |
36 | logger.setLevel(getattr(logging, log_level))
37 |
38 | if syslog_socket != '/dev/null':
39 | syslogHandler = SysLogHandler(syslog_socket)
40 | syslogHandler.setFormatter(logging.Formatter(log_format))
41 | logger.addHandler(syslogHandler)
42 | return logger
43 |
44 | def setup_logging(logger, syslog_socket, log_format, log_level='DEBUG'):
45 | log_level = log_level.upper()
46 |
47 | if log_level not in ['CRITICAL', 'ERROR', 'WARNING',
48 | 'INFO', 'DEBUG', 'NOTSET']:
49 | raise Exception('Invalid log level: {}'.format(log_level.upper()))
50 |
51 | logger.setLevel(getattr(logging, log_level))
52 |
53 | formatter = logging.Formatter(log_format)
54 |
55 | consoleHandler = logging.StreamHandler()
56 | consoleHandler.setFormatter(formatter)
57 | logger.addHandler(consoleHandler)
58 |
59 | if syslog_socket != '/dev/null':
60 | syslogHandler = SysLogHandler(syslog_socket)
61 | syslogHandler.setFormatter(formatter)
62 | logger.addHandler(syslogHandler)
63 |
64 |
65 | def set_marathon_auth_args(parser):
66 | parser.add_argument("--marathon-auth-credential-file",
67 | help="Path to file containing a user/pass for the "
68 | "Marathon HTTP API in the format of 'user:pass'.")
69 | parser.add_argument("--auth-credentials",
70 | help="user/pass for the Marathon HTTP API in the "
71 | "format of 'user:pass'.")
72 | parser.add_argument("--dcos-auth-credentials",
73 | default=os.getenv('DCOS_SERVICE_ACCOUNT_CREDENTIAL'),
74 | help="DC/OS service account credentials")
75 | parser.add_argument("--marathon-ca-cert",
76 | help="CA certificate for Marathon HTTPS connections")
77 |
78 | return parser
79 |
80 |
81 | class DCOSAuth(AuthBase):
82 | def __init__(self, credentials, ca_cert):
83 | creds = cleanup_json(json.loads(credentials))
84 | self.uid = creds['uid']
85 | self.private_key = creds['private_key']
86 | self.login_endpoint = creds['login_endpoint']
87 | self.verify = False
88 | self.auth_header = None
89 | self.expiry = 0
90 | if ca_cert:
91 | self.verify = ca_cert
92 |
93 | def __call__(self, auth_request):
94 | self.refresh_auth_header()
95 | auth_request.headers['Authorization'] = self.auth_header
96 | return auth_request
97 |
98 | def refresh_auth_header(self):
99 | now = int(time.time())
100 | if not self.auth_header or now >= self.expiry - 10:
101 | self.expiry = now + 3600
102 | payload = {
103 | 'uid': self.uid,
104 | # This is the expiry of the auth request params
105 | 'exp': now + 60,
106 | }
107 | token = jwt.encode(payload, self.private_key, 'RS256')
108 |
109 | data = {
110 | 'uid': self.uid,
111 | 'token': token.decode('ascii'),
112 | # This is the expiry for the token itself
113 | 'exp': self.expiry,
114 | }
115 | r = requests.post(self.login_endpoint,
116 | json=data,
117 | timeout=(3.05, 46),
118 | verify=self.verify)
119 | r.raise_for_status()
120 |
121 | self.auth_header = 'token=' + r.cookies['dcos-acs-auth-cookie']
122 |
123 |
124 | def get_marathon_auth_params(args):
125 | marathon_auth = None
126 | if args.marathon_auth_credential_file:
127 | with open(args.marathon_auth_credential_file, 'r') as f:
128 | line = f.readline().rstrip('\r\n')
129 |
130 | if line:
131 | marathon_auth = tuple(line.split(':'))
132 | elif args.auth_credentials:
133 | marathon_auth = tuple(args.auth_credentials.split(':'))
134 | elif args.dcos_auth_credentials:
135 | return DCOSAuth(args.dcos_auth_credentials, args.marathon_ca_cert)
136 |
137 | if marathon_auth and len(marathon_auth) != 2:
138 | print("Please provide marathon credentials in user:pass format")
139 | sys.exit(1)
140 |
141 | return marathon_auth
142 |
143 |
144 | def set_logging_args(parser):
145 | default_log_socket = "/dev/log"
146 | if sys.platform == "darwin":
147 | default_log_socket = "/var/run/syslog"
148 |
149 | parser.add_argument("--syslog-socket",
150 | help="Socket to write syslog messages to. "
151 | "Use '/dev/null' to disable logging to syslog",
152 | default=default_log_socket)
153 | parser.add_argument("--log-format",
154 | help="Set log message format",
155 | default="%(asctime)-15s %(name)s: %(message)s")
156 | parser.add_argument("--log-level",
157 | help="Set log level",
158 | default="DEBUG")
159 | return parser
160 |
161 |
162 | def cleanup_json(data):
163 | if isinstance(data, dict):
164 | return {k: cleanup_json(v) for k, v in data.items() if v is not None}
165 | if isinstance(data, list):
166 | return [cleanup_json(e) for e in data]
167 | return data
168 |
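
A hedged sketch of how the argument and auth helpers above are typically wired together by a caller (the Marathon URL is a placeholder and this script is not part of the repository; it only exercises the functions shown in common.py):

```python
import argparse
import requests

import common

parser = argparse.ArgumentParser(description='example consumer of common.py')
parser = common.set_logging_args(parser)
parser = common.set_marathon_auth_args(parser)
args = parser.parse_args()

# Create the parent 'marathon-lb' logger, as the real entry points do.
common.init_logger(args.syslog_socket, args.log_format, args.log_level)

# Returns a (user, pass) tuple, a DCOSAuth instance, or None.
auth = common.get_marathon_auth_params(args)
resp = requests.get('http://marathon.example:8080/v2/apps',
                    auth=auth, verify=args.marathon_ca_cert or True)
resp.raise_for_status()
```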
--------------------------------------------------------------------------------
/testsAT/README.md:
--------------------------------------------------------------------------------
1 | # README
2 |
3 | ## ACCEPTANCE TESTS
4 |
5 | Cucumber automated acceptance tests.
6 | This module depends on a QA library (stratio-test-bdd), where common logic and steps are implemented.
7 |
8 | ## EXECUTION TEST
9 |
10 | These tests will be executed as part of the continuous integration flow as follows:
11 |
12 | `mvn verify [-D<property>=<value>] [-Dit.test=<IT_class>|-Dgroups=<group>]`
13 |
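   | For example, assuming the group ids and system properties shown in the sections below, a single suite can be selected by group (e.g. `mvn verify -Dgroups=nightly -DlogLevel=DEBUG`) or, presumably via Maven Failsafe, a single IT class with `-Dit.test=<name_of_IT_class>`.
   | 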
14 | ## TESTS
15 |
16 | 1. [Nightly](#nightly)
17 | 2. [Check Invalid App Certificates](#check-invalid-app-certificates)
18 | 3. [Centralized Logs](#centralized-logs)
19 | 4. [Log HAProxy Wrapper Debug](#log-haproxy-wrapper-debug)
20 | 5. [Vault Renewal Token](#vault-renewal-token)
21 | 6. [Certs MarathonLB Serv](#certs-marathonlb-serv)
22 | 7. [Certificates](#certificates)
23 |
24 | ### Nightly
25 | - Pre-requisites:
26 | - N/A
27 | - Description:
28 | - A marathonLB service is installed using the flavour passed as a parameter.
29 | - All Nightly checks are executed to make sure the service is working correctly.
30 | - Needed:
31 | - DCOS_IP: IP of the cluster
32 | - BOOTSTRAP_IP: IP of the bootstrap node
33 | - DCOS_PASSWORD: DCOS cluster password
34 | - DCOS_CLI_HOST: name/IP of the dcos-cli docker container
35 | - DCOS_CLI_USER: dcos-cli docker user
36 | - DCOS_CLI_PASSWORD: dcos-cli docker password
37 | - REMOTE_USER: operational user for cluster machines
38 | - PEM_FILE_PATH: local path to pem file for cluster machines
39 | - EOS_INSTALLER_VERSION: EOS version
40 | - Usage example:
41 | `mvn clean verify -Dgroups=nightly -DBOOTSTRAP_IP=XXX.XXX.XXX.XXX -DREMOTE_USER=remote_user -DPEM_FILE_PATH= -DDCOS_CLI_HOST=XXX.XXX.XXX.XXX -DDCOS_CLI_USER=user -DDCOS_CLI_PASSWORD=password -DEOS_INSTALLER_VERSION= -DDCOS_PASSWORD=password -DlogLevel=DEBUG`
42 |
43 | ### Check Invalid App Certificates
44 | - Pre-requisites:
45 | - A marathonLB service must be installed.
46 | - Description:
47 | - Some checks are run to make sure it has been installed correctly.
48 | - Needed:
49 | - DCOS_CLI_HOST: name/IP of the dcos-cli docker container
50 | - DCOS_CLI_USER: dcos-cli docker user
51 | - DCOS_CLI_PASSWORD: dcos-cli docker password
52 | - BOOTSTRAP_IP: IP of the bootstrap node
53 | - REMOTE_USER: operational user for cluster machines
54 | - PEM_FILE_PATH: local path to pem file for cluster machines
55 | - EOS_INSTALLER_VERSION: EOS version
56 | - DCOS_PASSWORD: DCOS cluster password
57 | - Usage example:
58 | `mvn clean verify -Dgroups=checkInvalidAppCerts -DDCOS_CLI_HOST=XXX.XXX.XXX.XXX -DDCOS_CLI_USER=user -DDCOS_CLI_PASSWORD=password -DREMOTE_USER=remote_user -DPEM_FILE_PATH= -DlogLevel=DEBUG -DBOOTSTRAP_IP=XXX.XXX.XXX.XXX -DEOS_INSTALLER_VERSION= -DDCOS_PASSWORD=password`
59 |
60 | ### Centralized Logs
61 | - Pre-requisites:
62 | - A marathonLB service must be installed.
63 | - Description:
64 | - Some checks are run to make sure it has been installed correctly.
65 | - Needed:
66 | - DCOS_CLI_HOST: name/IP of the dcos-cli docker container
67 | - DCOS_CLI_USER: dcos-cli docker user
68 | - DCOS_CLI_PASSWORD: dcos-cli docker password
69 | - BOOTSTRAP_IP: IP of the bootstrap node
70 | - REMOTE_USER: operational user for cluster machines
71 | - PEM_FILE_PATH: local path to pem file for cluster machines
72 | - DCOS_PASSWORD: DCOS cluster password
73 | - Usage example:
74 | `mvn clean verify -Dgroups=centralizedlogs -DDCOS_CLI_HOST=XXX.XXX.XXX.XXX -DDCOS_CLI_USER=user -DDCOS_CLI_PASSWORD=password -DREMOTE_USER=remote_user -DPEM_FILE_PATH= -DlogLevel=DEBUG -DBOOTSTRAP_IP=XXX.XXX.XXX.XXX -DDCOS_PASSWORD=password`
75 |
76 | ### Log HAProxy Wrapper Debug
77 | - Pre-requisites:
78 | - A marathonLB service must be installed.
79 | - Description:
80 | - Some checks are run to make sure it has been installed correctly.
81 | - Needed:
82 | - DCOS_CLI_HOST: name/IP of the dcos-cli docker container
83 | - DCOS_CLI_USER: dcos-cli docker user
84 | - DCOS_CLI_PASSWORD: dcos-cli docker password
85 | - Usage example:
86 | `mvn clean verify -Dgroups=haproxyWrapperDebug -DDCOS_CLI_HOST=XXX.XXX.XXX.XXX -DDCOS_CLI_USER=user -DDCOS_CLI_PASSWORD=password`
87 |
88 | ### Vault Renewal Token
89 | - Pre-requisites:
90 | - A marathonLB service must be installed.
91 | - Description:
92 | - Some checks are run to make sure it has been installed correctly.
93 | - Needed:
94 | - DCOS_CLI_HOST: name/IP of the dcos-cli docker container
95 | - DCOS_CLI_USER: dcos-cli docker user
96 | - DCOS_CLI_PASSWORD: dcos-cli docker password
97 | - BOOTSTRAP_IP: IP of the bootstrap node
98 | - REMOTE_USER: operational user for cluster machines
99 | - PEM_FILE_PATH: local path to pem file for cluster machines
100 | - DCOS_PASSWORD: DCOS cluster password
101 | - Optional:
102 | - EOS_VAULT_PORT: vault port (default: 8200)
103 | - Usage example:
104 | `mvn clean verify -Dgroups=vaultRenewalToken -DDCOS_CLI_HOST=XXX.XXX.XXX.XXX -DDCOS_CLI_USER=user -DDCOS_CLI_PASSWORD=password -DBOOTSTRAP_IP=XXX.XXX.XXX.XXX -DREMOTE_USER=remote_user -DPEM_FILE_PATH= -DDCOS_PASSWORD=password`
105 |
106 | ### Certs MarathonLB Serv
107 | - Pre-requisites:
108 | - A marathonLB service must be installed.
109 | - Description:
110 | - Some checks are run to make sure it has been installed correctly.
111 | - Needed:
112 | - BOOTSTRAP_IP: IP of the bootstrap node
113 | - REMOTE_USER: operational user for cluster machines
114 | - PEM_FILE_PATH: local path to pem file for cluster machines
115 | - DCOS_PASSWORD: DCOS cluster password
116 | - Optional:
117 | - EOS_VAULT_PORT: vault port (default: 8200)
118 | - Usage example:
119 | `mvn clean verify -Dgroups=certsMarathonLBServ -DBOOTSTRAP_IP=XXX.XXX.XXX.XXX -DREMOTE_USER=remote_user -DPEM_FILE_PATH= -DlogLevel=DEBUG -DDCOS_PASSWORD=password`
120 |
121 | ### Certificates
122 | - Pre-requisites:
123 | - A marathonLB service must be installed.
124 | - Description:
125 | - Some checks are run to make sure it has been installed correctly.
126 | - Needed:
127 | - BOOTSTRAP_IP: IP of the bootstrap node
128 | - REMOTE_USER: operational user for cluster machines
129 | - PEM_FILE_PATH: local path to pem file for cluster machines
130 | - DCOS_CLI_HOST: name/IP of the dcos-cli docker container
131 | - DCOS_CLI_USER: dcos-cli docker user
132 | - DCOS_CLI_PASSWORD: dcos-cli docker password
133 | - EOS_INSTALLER_VERSION: EOS version
134 | - DCOS_PASSWORD: DCOS cluster password
135 | - Usage example:
136 | `mvn clean verify -Dgroups=app_client_certificates -DBOOTSTRAP_IP=XXX.XXX.XXX.XXX -DREMOTE_USER=remote_user -DPEM_FILE_PATH= -DDCOS_CLI_HOST=XXX.XXX.XXX.XXX -DDCOS_CLI_USER=user -DDCOS_CLI_PASSWORD=password -DEOS_INSTALLER_VERSION= -DlogLevel=DEBUG -DDCOS_PASSWORD=password`
--------------------------------------------------------------------------------
/tests/zdd_apps.json:
--------------------------------------------------------------------------------
1 | {
2 | "apps":[
3 | {
4 | "id":"/nginx-green",
5 | "cmd":null,
6 | "args":null,
7 | "user":null,
8 | "env":{
9 |
10 | },
11 | "instances":3,
12 | "cpus":0.1,
13 | "mem":65,
14 | "disk":0,
15 | "executor":"",
16 | "constraints":[
17 |
18 | ],
19 | "uris":[
20 |
21 | ],
22 | "storeUrls":[
23 |
24 | ],
25 | "portDefinitions":[
26 | {"port":10000}
27 | ],
28 | "requirePorts":false,
29 | "backoffSeconds":1,
30 | "backoffFactor":1.15,
31 | "maxLaunchDelaySeconds":3600,
32 | "container":{
33 | "type":"DOCKER",
34 | "volumes":[
35 |
36 | ],
37 | "docker":{
38 | "image":"brndnmtthws/nginx-echo-sleep",
39 | "network":"BRIDGE",
40 | "portMappings":[
41 | {
42 | "containerPort":8080,
43 | "hostPort":0,
44 | "servicePort":10000,
45 | "protocol":"tcp"
46 | }
47 | ],
48 | "privileged":false,
49 | "parameters":[
50 |
51 | ],
52 | "forcePullImage":true
53 | }
54 | },
55 | "healthChecks":[
56 | {
57 | "path":"/",
58 | "protocol":"HTTP",
59 | "portIndex":0,
60 | "gracePeriodSeconds":15,
61 | "intervalSeconds":3,
62 | "timeoutSeconds":15,
63 | "maxConsecutiveFailures":10
64 | }
65 | ],
66 | "dependencies":[
67 |
68 | ],
69 | "upgradeStrategy":{
70 | "minimumHealthCapacity":1,
71 | "maximumOverCapacity":1
72 | },
73 | "labels":{
74 | "HAPROXY_DEPLOYMENT_GROUP":"nginx",
75 | "HAPROXY_GROUP":"external",
76 | "HAPROXY_DEPLOYMENT_COLOUR":"green",
77 | "HAPROXY_DEPLOYMENT_TARGET_INSTANCES":"20",
78 | "HAPROXY_DEPLOYMENT_STARTED_AT":"2016-02-01T14:13:42.499089",
79 | "HAPROXY_DEPLOYMENT_ALT_PORT":"10001",
80 | "HAPROXY_0_PORT":"10000",
81 | "HAPROXY_APP_ID":"nginx"
82 | },
83 | "acceptedResourceRoles":[
84 | "*",
85 | "slave_public"
86 | ],
87 | "ipAddress":null,
88 | "version":"2016-02-01T22:57:48.784Z",
89 | "versionInfo":{
90 | "lastScalingAt":"2016-02-01T22:57:48.784Z",
91 | "lastConfigChangeAt":"2016-02-01T22:13:42.538Z"
92 | },
93 | "tasksStaged":0,
94 | "tasksRunning":3,
95 | "tasksHealthy":3,
96 | "tasksUnhealthy":0,
97 | "deployments":[
98 |
99 | ],
100 | "tasks":[
101 | {
102 | "id":"nginx-green.1883a887-c931-11e5-9836-026ee08d9f6f",
103 | "host":"10.0.6.25",
104 | "ipAddresses":[
105 | {
106 | "ipAddress":"172.17.1.72",
107 | "protocol":"IPv4"
108 | }
109 | ],
110 | "ports":[
111 | 16916
112 | ],
113 | "startedAt":"2016-02-01T22:14:03.432Z",
114 | "stagedAt":"2016-02-01T22:14:00.653Z",
115 | "version":"2016-02-01T22:14:00.625Z",
116 | "slaveId":"1f09399d-0cb0-4484-afd7-f08e99d2acbb-S1",
117 | "appId":"/nginx-green",
118 | "healthCheckResults":[
119 | {
120 | "alive":true,
121 | "consecutiveFailures":0,
122 | "firstSuccess":"2016-02-01T22:14:08.666Z",
123 | "lastFailure":null,
124 | "lastSuccess":"2016-02-01T23:10:30.986Z",
125 | "taskId":"nginx-green.1883a887-c931-11e5-9836-026ee08d9f6f"
126 | }
127 | ]
128 | },
129 | {
130 | "id":"nginx-green.0dbabba6-c931-11e5-9836-026ee08d9f6f",
131 | "host":"10.0.6.25",
132 | "ipAddresses":[
133 | {
134 | "ipAddress":"172.17.1.71",
135 | "protocol":"IPv4"
136 | }
137 | ],
138 | "ports":[
139 | 31184
140 | ],
141 | "startedAt":"2016-02-01T22:13:44.850Z",
142 | "stagedAt":"2016-02-01T22:13:42.559Z",
143 | "version":"2016-02-01T22:13:42.538Z",
144 | "slaveId":"1f09399d-0cb0-4484-afd7-f08e99d2acbb-S1",
145 | "appId":"/nginx-green",
146 | "healthCheckResults":[
147 | {
148 | "alive":true,
149 | "consecutiveFailures":0,
150 | "firstSuccess":"2016-02-01T22:13:50.565Z",
151 | "lastFailure":null,
152 | "lastSuccess":"2016-02-01T23:10:31.005Z",
153 | "taskId":"nginx-green.0dbabba6-c931-11e5-9836-026ee08d9f6f"
154 | }
155 | ]
156 | },
157 | {
158 | "id":"nginx-green.23532518-c931-11e5-9836-026ee08d9f6f",
159 | "host":"10.0.6.25",
160 | "ipAddresses":[
161 | {
162 | "ipAddress":"172.17.1.73",
163 | "protocol":"IPv4"
164 | }
165 | ],
166 | "ports":[
167 | 23336
168 | ],
169 | "startedAt":"2016-02-01T22:14:21.314Z",
170 | "stagedAt":"2016-02-01T22:14:18.790Z",
171 | "version":"2016-02-01T22:14:18.768Z",
172 | "slaveId":"1f09399d-0cb0-4484-afd7-f08e99d2acbb-S1",
173 | "appId":"/nginx-green",
174 | "healthCheckResults":[
175 | {
176 | "alive":true,
177 | "consecutiveFailures":0,
178 | "firstSuccess":"2016-02-01T22:14:26.805Z",
179 | "lastFailure":null,
180 | "lastSuccess":"2016-02-01T23:10:30.986Z",
181 | "taskId":"nginx-green.23532518-c931-11e5-9836-026ee08d9f6f"
182 | }
183 | ]
184 | }
185 | ]
186 | },
187 | {
188 | "id":"/nginx-blue",
189 | "cmd":null,
190 | "args":null,
191 | "user":null,
192 | "env":{
193 |
194 | },
195 | "instances":1,
196 | "cpus":0.1,
197 | "mem":65,
198 | "disk":0,
199 | "executor":"",
200 | "constraints":[
201 |
202 | ],
203 | "uris":[
204 |
205 | ],
206 | "storeUrls":[
207 |
208 | ],
209 | "ports":[
210 | 10001
211 | ],
212 | "requirePorts":false,
213 | "backoffSeconds":1,
214 | "backoffFactor":1.15,
215 | "maxLaunchDelaySeconds":3600,
216 | "container":{
217 | "type":"DOCKER",
218 | "volumes":[
219 |
220 | ],
221 | "docker":{
222 | "image":"brndnmtthws/nginx-echo-sleep",
223 | "network":"BRIDGE",
224 | "portMappings":[
225 | {
226 | "containerPort":8080,
227 | "hostPort":0,
228 | "servicePort":10001,
229 | "protocol":"tcp"
230 | }
231 | ],
232 | "privileged":false,
233 | "parameters":[
234 |
235 | ],
236 | "forcePullImage":true
237 | }
238 | },
239 | "healthChecks":[
240 | {
241 | "path":"/",
242 | "protocol":"HTTP",
243 | "portIndex":0,
244 | "gracePeriodSeconds":15,
245 | "intervalSeconds":3,
246 | "timeoutSeconds":15,
247 | "maxConsecutiveFailures":10
248 | }
249 | ],
250 | "dependencies":[
251 |
252 | ],
253 | "upgradeStrategy":{
254 | "minimumHealthCapacity":1,
255 | "maximumOverCapacity":1
256 | },
257 | "labels":{
258 | "HAPROXY_DEPLOYMENT_GROUP":"nginx",
259 | "HAPROXY_GROUP":"external",
260 | "HAPROXY_DEPLOYMENT_COLOUR":"blue",
261 | "HAPROXY_DEPLOYMENT_TARGET_INSTANCES":"3",
262 | "HAPROXY_DEPLOYMENT_STARTED_AT":"2016-02-01T14:57:54.371005",
263 | "HAPROXY_DEPLOYMENT_ALT_PORT":"10001",
264 | "HAPROXY_0_PORT":"10000",
265 | "HAPROXY_APP_ID":"nginx"
266 | },
267 | "acceptedResourceRoles":[
268 | "*",
269 | "slave_public"
270 | ],
271 | "ipAddress":null,
272 | "version":"2016-02-01T22:57:54.370Z",
273 | "versionInfo":{
274 | "lastScalingAt":"2016-02-01T22:57:54.370Z",
275 | "lastConfigChangeAt":"2016-02-01T22:57:54.370Z"
276 | },
277 | "tasksStaged":0,
278 | "tasksRunning":1,
279 | "tasksHealthy":1,
280 | "tasksUnhealthy":0,
281 | "deployments":[
282 |
283 | ],
284 | "tasks":[
285 | {
286 | "id":"nginx-blue.3a58a46a-c937-11e5-9836-026ee08d9f6f",
287 | "host":"10.0.1.147",
288 | "ipAddresses":[
289 | {
290 | "ipAddress":"172.17.0.45",
291 | "protocol":"IPv4"
292 | }
293 | ],
294 | "ports":[
295 | 25724
296 | ],
297 | "startedAt":"2016-02-01T22:57:56.889Z",
298 | "stagedAt":"2016-02-01T22:57:54.394Z",
299 | "version":"2016-02-01T22:57:54.370Z",
300 | "slaveId":"1f09399d-0cb0-4484-afd7-f08e99d2acbb-S3",
301 | "appId":"/nginx-blue",
302 | "healthCheckResults":[
303 | {
304 | "alive":true,
305 | "consecutiveFailures":0,
306 | "firstSuccess":"2016-02-01T22:58:02.405Z",
307 | "lastFailure":null,
308 | "lastSuccess":"2016-02-01T23:10:31.356Z",
309 | "taskId":"nginx-blue.3a58a46a-c937-11e5-9836-026ee08d9f6f"
310 | }
311 | ]
312 | }
313 | ]
314 | }
315 | ]
316 | }
317 |
--------------------------------------------------------------------------------
/run:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 |
4 | source /usr/sbin/b-log.sh
5 | B_LOG --stdout true
6 | DOCKER_LOG_LEVEL=${DOCKER_LOG_LEVEL:-INFO}
7 | eval LOG_LEVEL_${DOCKER_LOG_LEVEL}
8 |
9 |
10 | # # # # # # # # # #
11 | # Vault config:
12 | # STRING_VAULT_HOST
13 | # VAULT_PORT
14 | #
15 | # Token Auth:
16 | # VAULT_TOKEN
17 | #
18 | # Dynamic Auth:
19 | # VAULT_ROLE_ID
20 | # VAULT_SECRET_ID
21 | # # # # # # # # #
22 | VAULT_ENABLED=0
23 |
24 | MARATHON_LB_CERT_NAME="000_marathon-lb.pem"
25 |
26 | # KMS_UTILS & Dynamic authentication
27 | if [ -n "${STRING_VAULT_HOST-}" ] && [ -n "${VAULT_PORT-}" ]; then
28 | OLD_IFS=${IFS}
29 | IFS=',' read -r -a VAULT_HOSTS <<< "$STRING_VAULT_HOST"
30 | IFS=${OLD_IFS}
31 |
32 | if [ -n "${VAULT_ROLE_ID-}" ] && [ -n "${VAULT_SECRET_ID-}" ]; then
33 | source /usr/sbin/kms_utils.sh
34 | VAULT_ENABLED=1
35 | INFO "VAULT - Dynamic authentication provided"
36 | INFO "Dynamic login with vault..."
37 | login
38 | INFO "Login success"
39 |
40 | elif [ -n "${VAULT_TOKEN-}" ]; then
41 | source /usr/sbin/kms_utils.sh
42 | VAULT_ENABLED=1
43 | INFO "VAULT - Token authentication provided"
44 | else
45 |     INFO "VAULT - ERROR: Vault host configuration provided, but neither token nor dynamic authentication configuration was provided"
46 | exit 1
47 | fi
48 | fi
49 |
50 | #
51 | LOG_PREFIX="$(pwd) $0"
52 | log() {
53 | logline=$(echo "[$LOG_PREFIX] $1" | sed "s/%/%%/g")
54 | INFO "$logline" >&1
55 | }
56 | log_error() {
57 | logline="[$LOG_PREFIX] $1"
58 | ERROR "$logline"
59 | }
60 |
61 | if [ -n "${HAPROXY_SYSLOGD-}" ]; then
62 | SYSLOGD_SERVICE="/marathon-lb/service/syslogd"
63 | mkdir -p $SYSLOGD_SERVICE
64 | cp /marathon-lb/syslogd/run "$SYSLOGD_SERVICE/"
65 | fi
66 |
67 | if [ "${HAPROXY_RSYSLOG:-true}" = true ]; then
68 | RSYSLOG_SERVICE="/marathon-lb/service/rsyslog"
69 | mkdir -p $RSYSLOG_SERVICE
70 | cp /marathon-lb/rsyslog/run "$RSYSLOG_SERVICE/"
71 | fi
72 |
73 | # Custom syslog socket for marathon-lb.py logging
74 | SYSLOG_SOCKET=${SYSLOG_SOCKET:-/dev/null}
75 |
76 | LB_SERVICE="/marathon-lb/service/lb"
77 | mkdir -p $LB_SERVICE
78 |
79 | HAPROXY_SERVICE="/marathon-lb/service/haproxy"
80 | mkdir -p $HAPROXY_SERVICE/env
81 |
82 | if [ -n "${PORTS-}" ]; then
83 |   echo "$PORTS" > "$HAPROXY_SERVICE/env/PORTS"
84 | else
85 |   log_error "Define \$PORTS with a comma-separated list of ports to which HAProxy binds"
86 | exit 1
87 | fi
88 |
89 | # Find the --ssl-certs arg if one was provided,
90 | # get the certs and remove them and the arg from the list
91 | # of positional parameters so we don't duplicate them
92 | # further down when we pass $@ to marathon_lb.py
93 | declare -i ssl_certs_pos=0
94 | for ((i=1; i<=$#; i++)); do
95 | if [ "${!i}" = '--ssl-certs' ]; then
96 | ssl_certs_pos=$(($i+1))
97 | break
98 | fi
99 | done
100 | if [ $ssl_certs_pos -gt 0 ]; then
101 | SSL_CERTS=${!ssl_certs_pos}
102 | set -- "${@:1:$(($ssl_certs_pos-2))}" "${@:$(($ssl_certs_pos+1))}"
103 | [ -n "${HAPROXY_SSL_CERT-}" ] && SSL_CERTS+=",/etc/ssl/cert.pem"
104 | else
105 | SSL_CERTS="/etc/ssl/cert.pem"
106 | fi
107 |
108 | if [ ${VAULT_ENABLED} -eq 1 ]; then
109 | INFO "Downloading certificates from vault..."
110 | SSL_CERTS="/marathon-lb/vault_certs"
111 | mkdir -p "$SSL_CERTS"
112 |
113 | # Try to download cert from MARATHON_APP_ID
114 |   # MARATHON_APP_ID can be of the form "/<group>/.../<name>", e.g. /test/nginx or /nginx
115 |   # The multitenant convention replaces "/" with "." and reverses the order,
116 | # For example, if MARATHON_APP_ID is /test/nginx, CONV_MARATHON_APP_ID should be nginx.test
117 | # if MARATHON_APP_ID is /test1/test2/nginx, CONV_MARATHON_APP_ID should be nginx.test2.test1
118 | OLD_IFS=${IFS}
119 | IFS='/' read -r -a MARATHON_APP_ID_ARRAY <<< "$MARATHON_APP_ID"
120 | IFS=${OLD_IFS}
121 |
122 | unset "MARATHON_APP_ID_ARRAY[0]"
123 | CONV_MARATHON_APP_ID=$(printf '%s\n' "${MARATHON_APP_ID_ARRAY[@]}" | tac | tr '\n' '.')
124 | CONV_MARATHON_APP_ID=${CONV_MARATHON_APP_ID%?}
125 |
126 | INFO "Trying to download certificate from /userland/certificates/${CONV_MARATHON_APP_ID}"
127 | set +e
128 | getCert userland "${CONV_MARATHON_APP_ID}" "${CONV_MARATHON_APP_ID}" PEM "${SSL_CERTS}" 2>&1
129 | if [ $? -ne 0 ]
130 | then
131 | set -e
132 | INFO "Certificate in /userland/certificates/${CONV_MARATHON_APP_ID} does not exist, falling back to default route /userland/certificates/marathon-lb"
133 | getCert userland marathon-lb marathon-lb PEM "$SSL_CERTS"
134 | CONV_MARATHON_APP_ID="marathon-lb"
135 | fi
136 | set -e
137 |
138 |   # Merge the key downloaded from Vault into the certificate and remove the standalone key file
139 | chmod 644 "$SSL_CERTS/$CONV_MARATHON_APP_ID.pem"
140 | cat "$SSL_CERTS/$CONV_MARATHON_APP_ID.key" >> "$SSL_CERTS/$CONV_MARATHON_APP_ID.pem"
141 | rm -f "$SSL_CERTS/$CONV_MARATHON_APP_ID.key"
142 |   # Rename it so it comes first in alphabetical order and is used as the default certificate (SNI)
143 | mv "$SSL_CERTS/$CONV_MARATHON_APP_ID.pem" "$SSL_CERTS/$MARATHON_LB_CERT_NAME"
144 | src_file_path=/marathon-lb/vault_certs/$MARATHON_LB_CERT_NAME.orig
145 | dest_file_path=/marathon-lb/vault_certs/$MARATHON_LB_CERT_NAME
146 | mv $dest_file_path $src_file_path
147 | fold -w 64 $src_file_path > $dest_file_path
148 | rm -f $src_file_path
149 | # Copy the certificate to make it available in the old default path
150 | cp "$SSL_CERTS/$MARATHON_LB_CERT_NAME" /etc/ssl/cert.pem
151 |
152 | SSL_CERTS="$SSL_CERTS/"
153 | INFO "Downloaded"
154 |
155 | INFO "Downloading ca-bundle from vault..."
156 | getCAbundle /etc/ssl PEM
157 | INFO "Downloaded"
158 | src_file_path=/etc/ssl/ca-bundle.pem.orig
159 | dest_file_path=/etc/ssl/ca-bundle.pem
160 | mv $dest_file_path $src_file_path
161 | fold -w 64 $src_file_path > $dest_file_path
162 |
163 | elif [ -n "${HAPROXY_SSL_CERT-}" ]; then
164 | # if provided via environment variable, use it.
165 | echo -e "$HAPROXY_SSL_CERT" > /etc/ssl/cert.pem
166 |
167 | # if additional certs were provided as $HAPROXY_SSL_CERT0 .. 100
168 | for i in {0..100}; do
169 | certenv="HAPROXY_SSL_CERT$i"
170 | if [ -n "${!certenv-}" ]; then
171 | certfile="/etc/ssl/cert$i.pem"
172 | echo -e "${!certenv}" > $certfile
173 | SSL_CERTS+=",$certfile"
174 | fi
175 | done
176 | elif [ $ssl_certs_pos -eq 0 ]; then # if --ssl-certs wasn't passed as arg to this script
177 | # if no environment variable or command line argument is provided,
178 | # create self-signed ssl certificate
179 | openssl genrsa -out /tmp/server-key.pem 2048
180 | openssl req -new -key /tmp/server-key.pem -out /tmp/server-csr.pem -subj /CN=*/
181 | openssl x509 -req -in /tmp/server-csr.pem -out /tmp/server-cert.pem -signkey /tmp/server-key.pem -days 3650
182 | cat /tmp/server-cert.pem /tmp/server-key.pem > /etc/ssl/cert.pem
183 | rm /tmp/server-*.pem
184 | fi
185 |
186 | if [ -n "${MESOS_SANDBOX-}" ] && [ -d "$MESOS_SANDBOX/templates" ]; then
187 | mkdir -p templates
188 | cp -v "$MESOS_SANDBOX/templates/"* templates/
189 | fi
190 |
191 | if [ -n "${HAPROXY_SYSCTL_PARAMS-}" ]; then
192 | log "setting sysctl params to: ${HAPROXY_SYSCTL_PARAMS}"
193 | if [ -n "${HAPROXY_SYSCTL_NONSTRICT-}" ]; then
194 | # ignore errors
195 | sysctl -w $HAPROXY_SYSCTL_PARAMS || true
196 | else
197 | sysctl -w $HAPROXY_SYSCTL_PARAMS
198 | fi
199 | fi
200 |
201 | MODE=$1; shift
202 | case "$MODE" in
203 | poll)
204 | POLL_INTERVAL="${POLL_INTERVAL:-60}"
205 | ARGS=""
206 | ;;
207 | sse)
208 | ARGS="--sse"
209 | ;;
210 | *)
211 | log_error "Unknown mode $MODE. Synopsis: $0 poll|sse [marathon_lb.py args]"
212 | exit 1
213 | ;;
214 | esac
215 |
216 | if [ ${VAULT_ENABLED} -eq 1 ]; then
217 | INFO "Downloading marathon credentials from Vault..."
218 | getPass dcs marathon rest
219 | CREDENTIALS="$MARATHON_REST_USER:$MARATHON_REST_PASS"
220 | ARGS="$ARGS --auth-credentials $CREDENTIALS"
221 | INFO "Downloaded"
222 | fi
223 |
224 | export WRAPPER_SYSLOG_SOCKET=$SYSLOG_SOCKET
225 | WRAPPER_LOG_FORMAT_NEXT="false"
226 | WRAPPER_LOG_LEVEL_NEXT="false"
227 | for arg in "$@"; do
228 | escaped=$(printf %q "$arg")
229 | ARGS="$ARGS $escaped"
230 |
231 | if [ $WRAPPER_LOG_FORMAT_NEXT = "true" ]; then
232 | WRAPPER_LOG_FORMAT=$arg
233 | fi
234 | if [ $WRAPPER_LOG_LEVEL_NEXT = "true" ]; then
235 | WRAPPER_LOG_LEVEL=$arg
236 | fi
237 |
238 | if [ "$arg" = "--log-format" ]; then
239 | WRAPPER_LOG_FORMAT_NEXT="true"
240 | else
241 | WRAPPER_LOG_FORMAT_NEXT="false"
242 | fi
243 | if [ "$arg" = "--log-level" ]; then
244 | WRAPPER_LOG_LEVEL_NEXT="true"
245 | else
246 | WRAPPER_LOG_LEVEL_NEXT="false"
247 | fi
248 |
249 | done
250 |
251 | WRAPPER_LOG_FORMAT=${WRAPPER_LOG_FORMAT:-%(asctime)-15s %(name)s: %(message)s}
252 | export WRAPPER_LOG_FORMAT
253 | WRAPPER_LOG_LEVEL=${WRAPPER_LOG_LEVEL:-DEBUG}
254 | export WRAPPER_LOG_LEVEL
255 |
256 | grep -q -F -w "sv reload ${HAPROXY_SERVICE}" /marathon-lb/reload_haproxy.sh || echo "sv reload ${HAPROXY_SERVICE}" >> /marathon-lb/reload_haproxy.sh
257 |
258 | cat > $LB_SERVICE/run << EOF
259 | #!/bin/sh
260 | exec 2>&1
261 | cd /marathon-lb
262 | exec /marathon-lb/marathon_lb.py \
263 | --marathon-lb-cert-name "$MARATHON_LB_CERT_NAME" \
264 | --syslog-socket $SYSLOG_SOCKET \
265 | --haproxy-config /marathon-lb/haproxy.cfg \
266 | --ssl-certs "${SSL_CERTS}" \
267 | --command "/marathon-lb/reload_haproxy.sh" \
268 | $ARGS
269 | EOF
270 | chmod 755 $LB_SERVICE/run
271 |
272 | log "Created $LB_SERVICE/run with contents:"
273 | LB_RUN=$(cat $LB_SERVICE/run)
274 | log "$LB_RUN"
275 |
276 | if [ "${MODE}" == "poll" ]; then
277 |
278 | cat > $LB_SERVICE/finish << EOF
279 | #!/bin/sh
280 | sleep ${POLL_INTERVAL}
281 |
282 | EOF
283 | chmod 755 $LB_SERVICE/finish
284 |
285 | fi
286 |
287 | runsvdir -P /marathon-lb/service &
288 | trap "kill -s 1 $!" TERM INT
289 | wait
290 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import hashlib
4 | from io import BytesIO
5 | import logging
6 | import socket
7 |
8 | import pycurl
9 |
10 | import common
11 | from common import DCOSAuth
12 | from lrucache import LRUCache
13 |
14 | logger = None
15 | def init_log():
16 | global logger
17 | logger = common.marathon_lb_logger.getChild('utils.py')
18 |
19 | # The maximum number of clashes to allow when assigning a port.
20 | MAX_CLASHES = 50
21 |
22 |
23 | class ServicePortAssigner(object):
24 | """
25 | Helper class to assign service ports.
26 |
27 | Ordinarily Marathon should assign the service ports, but Marathon issue
28 | https://github.com/mesosphere/marathon/issues/3636 means that service
29 | ports are not returned for applications using IP-per-task. We work around
30 | that here by assigning deterministic ports from a configurable range when
31 | required.
32 |
33 | Note that auto-assigning ports is only useful when using vhost: the ports
34 | that we assign here are not exposed to the client.
35 |
36 | The LB command line options --min-serv-port-ip-per-task and
37 | --max-serv-port-ip-per-task specify the allowed range of ports to
38 | auto-assign from. The range of ports used for auto-assignment should be
39 | selected to ensure no clashes with the exposed LB ports and the
40 | Marathon-assigned services ports.
41 |
42 |     The service port assigner provides a mechanism to auto-assign service ports
43 |     using the application name to generate the service port (while preventing
44 | clashes when the port is already claimed by another app). The assigner
45 | provides a deterministic set of ports for a given ordered set of port
46 | requests.
47 | """
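   |     # A rough sketch of the scheme implemented below: candidate i for a given
   |     # app id and task port is
   |     #   min_port + (int(sha1("<app-id>-<task-port>-<i>").hexdigest()[:8], 16) % (max_port - min_port + 1))
   |     # and i is only incremented when the candidate clashes with a port that was
   |     # already handed out; after MAX_CLASHES attempts a linear scan of the range
   |     # picks the first free port instead.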
48 | def __init__(self):
49 | self.min_port = None
50 | self.max_port = None
51 | self.max_ports = None
52 | self.can_assign = False
53 | self.next_port = None
54 | self.ports_by_app = {}
55 |
56 | def _assign_new_service_port(self, app, task_port):
57 | assert self.can_assign
58 |
59 | if self.max_ports <= len(self.ports_by_app):
60 | logger.warning("Service ports are exhausted")
61 | return None
62 |
63 |         # We don't want to be searching forever, so cap the number of hash
64 |         # attempts at MAX_CLASHES before falling back to a linear scan.
65 | ports = self.ports_by_app.values()
66 | port = None
67 | for i in range(MAX_CLASHES):
68 | hash_str = "%s-%s-%s" % (app['id'], task_port, i)
69 | hash_val = hashlib.sha1(hash_str.encode("utf-8")).hexdigest()
70 | hash_int = int(hash_val[:8], 16)
71 | trial_port = self.min_port + (hash_int % self.max_ports)
72 | if trial_port not in ports:
73 | port = trial_port
74 | break
75 | if port is None:
76 | for port in range(self.min_port, self.max_port + 1):
77 | if port not in ports:
78 | break
79 |
80 | # We must have assigned a unique port by now since we know there were
81 | # some available.
82 | assert port and port not in ports, port
83 |
84 | logger.debug("Assigned new port: %d", port)
85 | return port
86 |
87 | def _get_service_port(self, app, task_port):
88 | key = (app['id'], task_port)
89 | port = (self.ports_by_app.get(key) or
90 | self._assign_new_service_port(app, task_port))
91 | self.ports_by_app[key] = port
92 | return port
93 |
94 | def set_ports(self, min_port, max_port):
95 | """
96 | Set the range of ports that we can use for auto-assignment of
97 | service ports - just for IP-per-task apps.
98 | :param min_port: The minimum port value
99 | :param max_port: The maximum port value
100 | """
101 | assert not self.ports_by_app
102 | assert max_port >= min_port
103 | self.min_port = min_port
104 | self.max_port = max_port
105 | self.max_ports = max_port - min_port + 1
106 | self.can_assign = self.min_port and self.max_port
107 |
108 | def reset(self):
109 | """
110 | Reset the assigner so that ports are newly assigned.
111 | """
112 | self.ports_by_app = {}
113 |
114 | def get_service_ports(self, app):
115 | """
116 | Return a list of service ports for this app.
117 | :param app: The application.
118 | :return: The list of ports. Note that if auto-assigning and ports
119 | become exhausted, a port may be returned as None.
120 | """
121 | mode = get_app_networking_mode(app)
122 | if mode == "container" or mode == "container/bridge":
123 | # Here we must use portMappings
124 | portMappings = get_app_port_mappings(app)
125 | if len(portMappings) > 0:
126 | ports = filter(lambda p: p is not None,
127 | map(lambda p: p.get('servicePort', None),
128 | portMappings))
129 | ports = list(ports)
130 | if ports:
131 | return list(ports)
132 |
133 | ports = app.get('ports', [])
134 | if 'portDefinitions' in app:
135 | ports = filter(lambda p: p is not None,
136 | map(lambda p: p.get('port', None),
137 | app.get('portDefinitions', []))
138 | )
139 |         ports = list(ports)  # materialize the filter object into a list
140 | # This supports legacy ip-per-container for Marathon 1.4.x and prior
141 | if not ports and mode == "container" and self.can_assign \
142 | and len(app['tasks']) > 0:
143 | task = app['tasks'][0]
144 | task_ports = get_app_task_ports(app, task, mode)
145 | if len(task_ports) > 0:
146 | ports = [self._get_service_port(app, task_port)
147 | for task_port in task_ports]
148 | logger.debug("Service ports: %r", ports)
149 | return ports
150 |
151 |
152 | class CurlHttpEventStream(object):
153 | def __init__(self, url, auth, verify):
154 | self.url = url
155 | self.received_buffer = BytesIO()
156 |
157 | headers = ['Cache-Control: no-cache', 'Accept: text/event-stream']
158 |
159 | self.curl = pycurl.Curl()
160 | self.curl.setopt(pycurl.URL, url)
161 | self.curl.setopt(pycurl.ENCODING, 'gzip')
162 | self.curl.setopt(pycurl.CONNECTTIMEOUT, 10)
163 | self.curl.setopt(pycurl.WRITEDATA, self.received_buffer)
164 |
165 | # The below settings are to prevent the connection from hanging if the
166 | # connection breaks silently. Since marathon-lb only listens, silent
167 | # connection failure results in marathon-lb waiting infinitely.
168 | #
169 | # Minimum bytes/second below which it is considered "low speed". So
170 | # "low speed" here refers to 0 bytes/second.
171 | self.curl.setopt(pycurl.LOW_SPEED_LIMIT, 1)
172 | # How long (in seconds) it's allowed to go below the speed limit
173 | # before it times out
174 | self.curl.setopt(pycurl.LOW_SPEED_TIME, 300)
175 |
176 | if auth and type(auth) is DCOSAuth:
177 | auth.refresh_auth_header()
178 | headers.append('Authorization: %s' % auth.auth_header)
179 | elif auth:
180 | self.curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
181 | self.curl.setopt(pycurl.USERPWD, '%s:%s' % auth)
182 | if verify:
183 | self.curl.setopt(pycurl.CAINFO, verify)
184 | else:
185 | self.curl.setopt(pycurl.SSL_VERIFYHOST, 0)
186 | self.curl.setopt(pycurl.SSL_VERIFYPEER, 0)
187 |
188 | self.curl.setopt(pycurl.HTTPHEADER, headers)
189 |
190 | self.curlmulti = pycurl.CurlMulti()
191 | self.curlmulti.add_handle(self.curl)
192 |
193 | self.status_code = 0
194 |
195 | SELECT_TIMEOUT = 10
196 |
197 | def _any_data_received(self):
198 | return self.received_buffer.tell() != 0
199 |
200 | def _get_received_data(self):
201 | result = self.received_buffer.getvalue()
202 | self.received_buffer.truncate(0)
203 | self.received_buffer.seek(0)
204 | return result
205 |
206 | def _check_status_code(self):
207 | if self.status_code == 0:
208 | self.status_code = self.curl.getinfo(pycurl.HTTP_CODE)
209 | if self.status_code != 0 and self.status_code != 200:
210 | raise Exception(str(self.status_code) + ' ' + self.url)
211 |
212 | def _perform_on_curl(self):
213 | while True:
214 | ret, num_handles = self.curlmulti.perform()
215 | if ret != pycurl.E_CALL_MULTI_PERFORM:
216 | break
217 | return num_handles
218 |
219 | def _iter_chunks(self):
220 | while True:
221 | remaining = self._perform_on_curl()
222 | if self._any_data_received():
223 | self._check_status_code()
224 | yield self._get_received_data()
225 | if remaining == 0:
226 | break
227 | self.curlmulti.select(self.SELECT_TIMEOUT)
228 |
229 | self._check_status_code()
230 | self._check_curl_errors()
231 |
232 | def _check_curl_errors(self):
233 | for f in self.curlmulti.info_read()[2]:
234 | raise pycurl.error(*f[1:])
235 |
236 | def iter_lines(self):
237 | chunks = self._iter_chunks()
238 | return self._split_lines_from_chunks(chunks)
239 |
240 | @staticmethod
241 | def _split_lines_from_chunks(chunks):
242 | # same behaviour as requests' Response.iter_lines(...)
243 |
244 | pending = None
245 | for chunk in chunks:
246 |
247 | if pending is not None:
248 | chunk = pending + chunk
249 | lines = chunk.splitlines()
250 |
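   |             # splitlines() drops line terminators, so if the last element still
   |             # ends with the chunk's final byte the chunk was cut mid-line: keep
   |             # that element pending and prepend it to the next chunk.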
251 | if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
252 | pending = lines.pop()
253 | else:
254 | pending = None
255 |
256 | for line in lines:
257 | yield line
258 |
259 | if pending is not None:
260 | yield pending
261 |
262 |
263 | def resolve_ip(host):
264 | """
265 | :return: string, an empty string indicates that no ip was found.
266 | """
267 | cached_ip = ip_cache.get().get(host, "")
268 | if cached_ip != "":
269 | return cached_ip
270 | else:
271 | try:
272 | logger.debug("trying to resolve ip address for host %s", host)
273 | ip = socket.gethostbyname(host)
274 | ip_cache.get().set(host, ip)
275 | return ip
276 | except socket.gaierror:
277 | return ""
278 |
279 |
280 | class LRUCacheSingleton(object):
281 | def __init__(self):
282 | self.lru_cache = None
283 |
284 | def get(self):
285 | if self.lru_cache is None:
286 | self.lru_cache = LRUCache()
287 | return self.lru_cache
288 |
289 | def set(self, lru_cache):
290 | self.lru_cache = lru_cache
291 |
292 |
293 | ip_cache = LRUCacheSingleton()
294 |
295 |
296 | def get_app_networking_mode(app):
297 | mode = 'host'
298 |
299 | if app.get('ipAddress'):
300 | mode = 'container'
301 |
302 | _mode = app.get('container', {})\
303 | .get('docker', {})\
304 | .get('network', '')
305 | if _mode == 'USER':
306 | mode = 'container'
307 | elif _mode == 'BRIDGE':
308 | mode = 'container/bridge'
309 |
310 | networks = app.get('networks', [])
311 | for n in networks:
312 | # Modes cannot be mixed, so assigning the last mode is fine
313 | mode = n.get('mode', 'container')
314 |
315 | return mode
316 |
317 |
318 | def get_task_ip(task, mode):
319 | """
320 | :return: string, an empty string indicates that no ip was found.
321 | """
322 | if mode == 'container':
323 | task_ip_addresses = task.get('ipAddresses', [])
324 | if len(task_ip_addresses) == 0:
325 | logger.warning("Task %s does not yet have an ip address allocated",
326 | task['id'])
327 | return ""
328 | task_ip = task_ip_addresses[0].get('ipAddress', "")
329 | if task_ip == "":
330 | logger.warning("Task %s does not yet have an ip address allocated",
331 | task['id'])
332 | return ""
333 | return task_ip
334 | else:
335 | host = task.get('host', "")
336 | if host == "":
337 | logger.warning("Could not find task host, ignoring")
338 | return ""
339 | task_ip = resolve_ip(host)
340 | if task_ip == "":
341 | logger.warning("Could not resolve ip for host %s, ignoring",
342 | host)
343 | return ""
344 | return task_ip
345 |
346 |
347 | def get_app_port_mappings(app):
348 | """
349 | :return: list
350 | """
351 | portMappings = app.get('container', {})\
352 | .get('docker', {})\
353 | .get('portMappings', [])
354 | if len(portMappings) > 0:
355 | return portMappings
356 |
357 | return app.get('container', {})\
358 | .get('portMappings', [])
359 |
360 |
361 | def get_task_ports(task):
362 | """
363 | :return: list
364 | """
365 | return task.get('ports', [])
366 |
367 |
368 | def get_port_definition_ports(app):
369 | """
370 | :return: list
371 | """
372 | port_definitions = app.get('portDefinitions', [])
373 | return [p['port'] for p in port_definitions if 'port' in p]
374 |
375 |
376 | def get_ip_address_discovery_ports(app):
377 | """
378 | :return: list
379 | """
380 | ip_address = app.get('ipAddress', {})
381 | if len(ip_address) == 0:
382 | return []
383 | discovery = app.get('ipAddress', {}).get('discovery', {})
384 | return [int(p['number'])
385 | for p in discovery.get('ports', [])
386 | if 'number' in p]
387 |
388 |
389 | def get_port_mapping_ports(app):
390 | """
391 | :return: list
392 | """
393 | port_mappings = get_app_port_mappings(app)
394 | return [p['containerPort'] for p in port_mappings if 'containerPort' in p]
395 |
396 |
397 | def get_app_task_ports(app, task, mode):
398 | """
399 | :return: list
400 | """
401 | if mode == 'host':
402 | task_ports = get_task_ports(task)
403 | if len(task_ports) > 0:
404 | return task_ports
405 | return get_port_definition_ports(app)
406 | elif mode == 'container/bridge':
407 | task_ports = get_task_ports(task)
408 | if len(task_ports) > 0:
409 | return task_ports
410 | # Will only work for Marathon < 1.5
411 | task_ports = get_port_definition_ports(app)
412 | if len(task_ports) > 0:
413 | return task_ports
414 | return get_port_mapping_ports(app)
415 | else:
416 | task_ports = get_ip_address_discovery_ports(app)
417 | if len(task_ports) > 0:
418 | return task_ports
419 | return get_port_mapping_ports(app)
420 |
421 |
422 | def get_task_ip_and_ports(app, task):
423 | """
424 | Return the IP address and list of ports used to access a task. For a
425 | task using IP-per-task, this is the IP address of the task, and the ports
426 | exposed by the task services. Otherwise, this is the IP address of the
427 | host and the ports exposed by the host.
428 | :param app: The application owning the task.
429 | :param task: The task.
430 | :return: Tuple of (ip address, [ports]). Returns (None, None) if no IP
431 | address could be resolved or found for the task.
432 | """
433 |
434 | mode = get_app_networking_mode(app)
435 | task_ip = get_task_ip(task, mode)
436 | task_ports = get_app_task_ports(app, task, mode)
437 | # The overloading of empty string, and empty list as False is intentional.
438 | if not (task_ip and task_ports):
439 | return None, None
440 | logger.debug("Returning: %r, %r", task_ip, task_ports)
441 | return task_ip, task_ports
442 |
--------------------------------------------------------------------------------