├── .editorconfig ├── .gitattributes ├── .github └── workflows │ ├── build_and_test_with_resty_events.yml │ ├── build_and_test_with_worker_events.yml │ ├── lint.yml │ └── sast.yml ├── .gitignore ├── .luacheckrc ├── LICENSE ├── Makefile ├── README.md ├── config.ld ├── docs ├── index.html ├── ldoc.css ├── modules │ ├── Healthcheck.html │ ├── resty.healthcheck.html │ └── resty.healthcheck.utils.html └── topics │ └── README.md.html ├── lib └── resty │ └── healthcheck.lua ├── lua-resty-healthcheck-scm-1.rockspec ├── rockspecs ├── lua-resty-healthcheck-0.1.0-1.rockspec ├── lua-resty-healthcheck-0.2.0-1.rockspec ├── lua-resty-healthcheck-0.3.0-1.rockspec ├── lua-resty-healthcheck-0.4.0-1.rockspec ├── lua-resty-healthcheck-0.4.1-1.rockspec ├── lua-resty-healthcheck-0.4.1-2.rockspec ├── lua-resty-healthcheck-0.4.2-1.rockspec ├── lua-resty-healthcheck-0.4.2-2.rockspec ├── lua-resty-healthcheck-0.5.0-1.rockspec ├── lua-resty-healthcheck-0.5.0-2.rockspec ├── lua-resty-healthcheck-0.6.0-1.rockspec ├── lua-resty-healthcheck-0.6.0-2.rockspec ├── lua-resty-healthcheck-0.6.1-1.rockspec ├── lua-resty-healthcheck-0.6.1-2.rockspec ├── lua-resty-healthcheck-1.0.0-1.rockspec ├── lua-resty-healthcheck-1.0.0-2.rockspec ├── lua-resty-healthcheck-1.1.0-1.rockspec ├── lua-resty-healthcheck-1.1.0-2.rockspec ├── lua-resty-healthcheck-1.1.1-1.rockspec ├── lua-resty-healthcheck-1.1.1-2.rockspec ├── lua-resty-healthcheck-1.1.2-1.rockspec ├── lua-resty-healthcheck-1.1.2-2.rockspec ├── lua-resty-healthcheck-1.2.0-1.rockspec ├── lua-resty-healthcheck-1.2.0-2.rockspec ├── lua-resty-healthcheck-1.3.0-1.rockspec ├── lua-resty-healthcheck-1.3.0-2.rockspec ├── lua-resty-healthcheck-1.4.0-1.rockspec ├── lua-resty-healthcheck-1.4.1-1.rockspec ├── lua-resty-healthcheck-1.4.2-1.rockspec ├── lua-resty-healthcheck-1.5.0-1.rockspec ├── lua-resty-healthcheck-1.5.1-1.rockspec ├── lua-resty-healthcheck-1.6.0-1.rockspec ├── lua-resty-healthcheck-1.6.1-1.rockspec ├── lua-resty-healthcheck-1.6.2-1.rockspec ├── lua-resty-healthcheck-1.6.3-1.rockspec ├── lua-resty-healthcheck-2.0.0-1.rockspec ├── lua-resty-healthcheck-3.0.0-1.rockspec ├── lua-resty-healthcheck-3.0.1-1.rockspec ├── lua-resty-healthcheck-3.0.2-1.rockspec └── lua-resty-healthcheck-3.1.0-1.rockspec ├── t ├── with_resty-events │ ├── 00-new.t │ ├── 01-start-stop.t │ ├── 02-add_target.t │ ├── 03-get_target_status.t │ ├── 04-report_success.t │ ├── 05-report_failure.t │ ├── 06-report_http_status.t │ ├── 07-report_tcp_failure.t │ ├── 08-report_timeout.t │ ├── 09-active_probes.t │ ├── 10-garbagecollect.t_disabled │ ├── 11-clear.t │ ├── 12-set_target_status.t │ ├── 13-integration.t_disabled │ ├── 14-tls_active_probes.t │ ├── 15-get_virtualhost_target_status.t │ ├── 16-set_all_target_statuses_for_hostname.t │ ├── 17-mtls.t │ ├── 18-req-headers.t │ ├── 19-timer.t │ └── util │ │ ├── cert.pem │ │ ├── key.pem │ │ └── reindex └── with_worker-events │ ├── 00-new.t │ ├── 01-start-stop.t │ ├── 02-add_target.t │ ├── 03-get_target_status.t │ ├── 04-report_success.t │ ├── 05-report_failure.t │ ├── 06-report_http_status.t │ ├── 07-report_tcp_failure.t │ ├── 08-report_timeout.t │ ├── 09-active_probes.t │ ├── 10-garbagecollect.t │ ├── 11-clear.t │ ├── 12-set_target_status.t │ ├── 13-integration.t │ ├── 14-tls_active_probes.t │ ├── 15-get_virtualhost_target_status.t │ ├── 16-set_all_target_statuses_for_hostname.t │ ├── 17-mtls.t │ ├── 18-req-headers.t │ ├── 19-timer.t │ ├── 20-event_handler.t │ ├── 21-run_locked.t │ └── util │ ├── cert.pem │ ├── key.pem │ └── reindex └── valgrind.suppress /.editorconfig: 
-------------------------------------------------------------------------------- 1 | # See http://editorconfig.org 2 | root = true 3 | 4 | [*] 5 | end_of_line = lf 6 | insert_final_newline = true 7 | trim_trailing_whitespace = true 8 | charset = utf-8 9 | 10 | [*.lua] 11 | indent_style = space 12 | indent_size = 2 13 | 14 | [Makefile] 15 | indent_style = tab 16 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.t linguist-language=Text 2 | -------------------------------------------------------------------------------- /.github/workflows/build_and_test_with_resty_events.yml: -------------------------------------------------------------------------------- 1 | name: Build and test - with resty_events 2 | 3 | concurrency: 4 | # for PR's cancel the running task, if another commit is pushed 5 | group: ${{ github.workflow }} ${{ github.ref }} 6 | cancel-in-progress: ${{ github.event_name == 'pull_request' }} 7 | 8 | on: 9 | pull_request: {} 10 | workflow_dispatch: {} 11 | push: 12 | branches: 13 | - main 14 | - master 15 | - release/** 16 | 17 | jobs: 18 | build: 19 | name: CI using lua-resty-events 20 | runs-on: ubuntu-20.04 21 | strategy: 22 | matrix: 23 | openresty-version: [1.19.9.1, 1.21.4.3, 1.25.3.1] 24 | 25 | steps: 26 | - name: Update and install OS dependencies 27 | run: | 28 | sudo apt-get update && sudo apt-get install -y libssl-dev ssl-cert 29 | sudo systemctl disable nginx 30 | sudo systemctl stop nginx 31 | 32 | 33 | - name: Set environment variables 34 | env: 35 | OPENRESTY_VER: ${{ matrix.openresty-version }} 36 | RESTY_EVENTS_VER: 0.3.0 37 | LUAROCKS_VER: 3.9.0 38 | OPENSSL_VER: 1.1.1q 39 | PCRE_VER: 8.45 40 | run: | 41 | echo "INSTALL_ROOT=/home/runner/work/cache/install-root" >> $GITHUB_ENV 42 | echo "DOWNLOAD_ROOT=/home/runner/work/cache/download-root" >> $GITHUB_ENV 43 | echo "OPENRESTY=$OPENRESTY_VER" >> $GITHUB_ENV 44 | echo "LUAROCKS=$LUAROCKS_VER" >> $GITHUB_ENV 45 | echo "OPENSSL=$OPENSSL_VER" >> $GITHUB_ENV 46 | echo "PCRE=$PCRE_VER" >> $GITHUB_ENV 47 | echo "RESTY_EVENTS=$RESTY_EVENTS_VER" >> $GITHUB_ENV 48 | echo "LD_LIBRARY_PATH=$HOME/install-root/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV 49 | 50 | - name: Checkout lua-resty-healthcheck 51 | uses: actions/checkout@v3 52 | 53 | - name: Lookup build cache 54 | uses: actions/cache@v3 55 | id: cache-deps 56 | with: 57 | path: | 58 | /home/runner/work/cache/install-root 59 | /home/runner/work/cache/download-root 60 | key: ${{ runner.os }}-${{ hashFiles('**/.github/workflows/build_and_test_with_resty_events.yml') }}-${{ matrix.openresty-version }} 61 | 62 | - name: Add to Path 63 | run: echo "$INSTALL_ROOT/bin:$INSTALL_ROOT/nginx/sbin:$INSTALL_ROOT/luajit/bin:/usr/bin" >> $GITHUB_PATH 64 | 65 | - name: Build and install OpenSSL 66 | run: | 67 | curl -sSLO https://www.openssl.org/source/openssl-$OPENSSL.tar.gz 68 | tar -xzf openssl-$OPENSSL.tar.gz 69 | cd openssl-$OPENSSL 70 | ./config -g shared -DPURIFY no-threads --prefix=$INSTALL_ROOT --openssldir=$INSTALL_ROOT no-unit-test 71 | make 72 | make install_sw 73 | 74 | - name: Checkout lua-resty-events 75 | uses: actions/checkout@v3 76 | with: 77 | repository: Kong/lua-resty-events 78 | ref: refs/tags/0.3.0 79 | path: lua-resty-events 80 | 81 | - name: Build and install OpenResty 82 | run: | 83 | curl -sSLO https://openresty.org/download/openresty-$OPENRESTY.tar.gz 84 | tar -xzf openresty-$OPENRESTY.tar.gz 85 | cd openresty-$OPENRESTY 86 | 
./configure \ 87 | --prefix=$INSTALL_ROOT \ 88 | --with-cc-opt='-I$INSTALL_ROOT/include' \ 89 | --with-ld-opt='-L$INSTALL_ROOT/lib -Wl,-rpath,$INSTALL_ROOT/lib' \ 90 | --with-pcre-jit \ 91 | --with-http_ssl_module \ 92 | --with-http_realip_module \ 93 | --with-http_stub_status_module \ 94 | --with-http_v2_module \ 95 | --without-http_encrypted_session_module \ 96 | --with-stream_realip_module \ 97 | --with-stream_ssl_preread_module \ 98 | --add-module=../lua-resty-events \ 99 | --with-pcre 100 | make 101 | make install 102 | make install LUA_LIBDIR=$INSTALL_ROOT/lualib 103 | 104 | - name: Install LuaRocks 105 | run: | 106 | curl -sSLO https://luarocks.org/releases/luarocks-$LUAROCKS.tar.gz 107 | tar -xzf luarocks-$LUAROCKS.tar.gz 108 | cd luarocks-$LUAROCKS 109 | ./configure \ 110 | --prefix=$INSTALL_ROOT \ 111 | --lua-suffix=jit \ 112 | --with-lua=$INSTALL_ROOT/luajit \ 113 | --with-lua-include=$INSTALL_ROOT/luajit/include/luajit-2.1 114 | make build 115 | make install 116 | 117 | - name: Install manual dependencies 118 | run: | 119 | luarocks install luacheck 120 | 121 | - name: Install Test::NGINX 122 | run: | 123 | sudo apt-get install cpanminus 124 | cpanm --notest --local-lib=$HOME/perl5 local::lib && eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib) 125 | cpanm --notest Test::Nginx 126 | 127 | - name: Install lua-resty-events 128 | run: | 129 | cd lua-resty-events 130 | OPENRESTY_PREFIX=$INSTALL_ROOT PREFIX=$INSTALL_ROOT LUA_LIB_DIR=$INSTALL_ROOT/lualib make install 131 | 132 | - name: Install lua-resty-healthcheck 133 | run: luarocks make 134 | 135 | - name: Run tests 136 | env: 137 | PATH: ${{ env.INSTALL_ROOT }}/bin:${{ env.INSTALL_ROOT }}/nginx/sbin:${{ env.INSTALL_ROOT }}/luajit/bin:/usr/bin 138 | TEST_NGINX_BINARY: ${{ env.INSTALL_ROOT }}/nginx/sbin/nginx 139 | run: | 140 | eval `luarocks path` 141 | eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib) 142 | TEST_NGINX_TIMEOUT=4 TEST_NGINX_RANDOMIZE=1 prove -I. 
-r t/with_resty-events 143 | -------------------------------------------------------------------------------- /.github/workflows/build_and_test_with_worker_events.yml: -------------------------------------------------------------------------------- 1 | name: Build and test - with worker_events 2 | 3 | concurrency: 4 | # for PR's cancel the running task, if another commit is pushed 5 | group: ${{ github.workflow }} ${{ github.ref }} 6 | cancel-in-progress: ${{ github.event_name == 'pull_request' }} 7 | 8 | on: 9 | pull_request: {} 10 | workflow_dispatch: {} 11 | push: 12 | branches: 13 | - main 14 | - master 15 | 16 | jobs: 17 | build: 18 | name: CI using lua-resty-worker-events 19 | runs-on: ubuntu-20.04 20 | strategy: 21 | matrix: 22 | openresty-version: [1.21.4.3] 23 | 24 | steps: 25 | - name: Update and install OS dependencies 26 | run: | 27 | sudo apt-get update && sudo apt-get install -y libssl-dev ssl-cert 28 | sudo systemctl disable nginx 29 | sudo systemctl stop nginx 30 | 31 | 32 | - name: Set environment variables 33 | env: 34 | OPENRESTY_VER: ${{ matrix.openresty-version }} 35 | run: | 36 | echo "/usr/local/openresty/nginx/sbin" >> $GITHUB_PATH 37 | 38 | - name: Checkout lua-resty-healthcheck 39 | uses: actions/checkout@v3 40 | 41 | - name: Install OpenResty ${{ matrix.openresty-version }} 42 | env: 43 | OPENRESTY_VER: ${{ matrix.openresty-version }} 44 | run: | 45 | sudo apt-get -y install --no-install-recommends wget gnupg ca-certificates 46 | wget -O - https://openresty.org/package/pubkey.gpg | sudo apt-key add - 47 | echo "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main" | sudo tee /etc/apt/sources.list.d/openresty.list 48 | sudo apt-get update 49 | sudo apt-get -y install openresty=$OPENRESTY_VER-1~focal1 50 | 51 | - name: Install LuaRocks 52 | run: sudo apt-get install -y luarocks 53 | 54 | - name: Install manual dependencies 55 | run: | 56 | sudo luarocks install luacheck 57 | sudo luarocks install lua-resty-worker-events 1.0.0 58 | 59 | - name: Install Test::NGINX 60 | run: | 61 | sudo apt-get install cpanminus 62 | cpanm --notest --local-lib=$HOME/perl5 local::lib && eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib) 63 | cpanm --notest Test::Nginx 64 | 65 | - name: Checkout lua-resty-healthcheck 66 | uses: actions/checkout@v3 67 | 68 | - name: Install lua-resty-healthcheck 69 | run: sudo luarocks make 70 | 71 | - name: Run tests 72 | run: | 73 | eval `luarocks path` 74 | eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib) 75 | TEST_NGINX_TIMEOUT=4 TEST_NGINX_RANDOMIZE=1 prove -I. 
-r t/with_worker-events 76 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | concurrency: 4 | # for PR's cancel the running task, if another commit is pushed 5 | group: ${{ github.workflow }} ${{ github.ref }} 6 | cancel-in-progress: ${{ github.event_name == 'pull_request' }} 7 | 8 | on: 9 | pull_request: {} 10 | workflow_dispatch: {} 11 | push: 12 | branches: 13 | - main 14 | - master 15 | 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.ref }} 18 | cancel-in-progress: ${{ github.event_name == 'pull_request' }} 19 | 20 | jobs: 21 | lua-check: 22 | name: Lua Check 23 | runs-on: ubuntu-latest 24 | permissions: 25 | contents: read 26 | issues: read 27 | checks: write 28 | pull-requests: write 29 | if: (github.actor != 'dependabot[bot]') 30 | 31 | steps: 32 | - name: Checkout source code 33 | uses: actions/checkout@v3 34 | 35 | # Optional step to run on only changed files 36 | - name: Get changed files 37 | id: changed-files 38 | uses: kong/changed-files@4edd678ac3f81e2dc578756871e4d00c19191daf 39 | with: 40 | files: | 41 | **.lua 42 | 43 | - name: Lua Check 44 | if: steps.changed-files.outputs.any_changed == 'true' 45 | uses: Kong/public-shared-actions/code-check-actions/lua-lint@c03e30a36e8a2dde5cbd463229a96aaad7ccad24 46 | with: 47 | additional_args: '--no-default-config --config .luacheckrc' 48 | files: ${{ steps.changed-files.outputs.all_changed_files }} 49 | -------------------------------------------------------------------------------- /.github/workflows/sast.yml: -------------------------------------------------------------------------------- 1 | name: SAST 2 | 3 | concurrency: 4 | # for PR's cancel the running task, if another commit is pushed 5 | group: ${{ github.workflow }} ${{ github.ref }} 6 | cancel-in-progress: ${{ github.event_name == 'pull_request' }} 7 | 8 | on: 9 | pull_request: 10 | paths: 11 | - lib/**.lua 12 | push: 13 | branches: 14 | - master 15 | - main 16 | paths: 17 | - lib/**.lua 18 | workflow_dispatch: {} 19 | 20 | 21 | jobs: 22 | semgrep: 23 | name: Semgrep SAST 24 | runs-on: ubuntu-latest 25 | permissions: 26 | # required for all workflows 27 | security-events: write 28 | # only required for workflows in private repositories 29 | actions: read 30 | contents: read 31 | 32 | if: (github.actor != 'dependabot[bot]') 33 | 34 | steps: 35 | - uses: actions/checkout@v3 36 | - uses: Kong/public-shared-actions/security-actions/semgrep@c03e30a36e8a2dde5cbd463229a96aaad7ccad24 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.swp 2 | *.swo 3 | *~ 4 | go 5 | t/servroot/ 6 | nginx 7 | ctags 8 | tags 9 | a.lua 10 | .DS_Store 11 | -------------------------------------------------------------------------------- /.luacheckrc: -------------------------------------------------------------------------------- 1 | std = "ngx_lua" 2 | unused_args = false 3 | redefined = false 4 | max_line_length = false 5 | 6 | 7 | not_globals = { 8 | "string.len", 9 | "table.getn", 10 | } 11 | 12 | 13 | ignore = { 14 | "6.", -- ignore whitespace warnings 15 | } 16 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | OPENRESTY_PREFIX=/usr/local/openresty 2 | 3 | 
PREFIX ?= /usr/local 4 | LUA_INCLUDE_DIR ?= $(PREFIX)/include 5 | LUA_LIB_DIR ?= $(PREFIX)/lib/lua/$(LUA_VERSION) 6 | INSTALL ?= install 7 | 8 | .PHONY: all test install 9 | 10 | all: ; 11 | 12 | install: all 13 | $(INSTALL) -d $(DESTDIR)/$(LUA_LIB_DIR)/resty/ 14 | $(INSTALL) lib/resty/*.lua $(DESTDIR)/$(LUA_LIB_DIR)/resty/ 15 | 16 | test: all 17 | PATH=$(OPENRESTY_PREFIX)/nginx/sbin:$$PATH prove -I../test-nginx/lib -r t 18 | 19 | -------------------------------------------------------------------------------- /config.ld: -------------------------------------------------------------------------------- 1 | project='lua-resty-healthcheck' 2 | title='Healthcheck library for OpenResty' 3 | description='Provides active and passive healthchecks (http and tcp) for OpenResty' 4 | format='discount' 5 | file='./lib/' 6 | dir='docs' 7 | readme='README.md' 8 | sort=true 9 | sort_modules=true 10 | all=false 11 | style='./docs/' 12 | -------------------------------------------------------------------------------- /docs/ldoc.css: -------------------------------------------------------------------------------- 1 | body { 2 | color: #47555c; 3 | font-size: 16px; 4 | font-family: "Open Sans", sans-serif; 5 | margin: 0; 6 | background: #eff4ff; 7 | } 8 | 9 | a:link { color: #008fee; } 10 | a:visited { color: #008fee; } 11 | a:hover { color: #22a7ff; } 12 | 13 | h1 { font-size:26px; font-weight: normal; } 14 | h2 { font-size:22px; font-weight: normal; } 15 | h3 { font-size:18px; font-weight: normal; } 16 | h4 { font-size:16px; font-weight: bold; } 17 | 18 | hr { 19 | height: 1px; 20 | background: #c1cce4; 21 | border: 0px; 22 | margin: 15px 0; 23 | } 24 | 25 | code, tt { 26 | font-family: monospace; 27 | } 28 | span.parameter { 29 | font-family: monospace; 30 | font-weight: bold; 31 | color: rgb(99, 115, 131); 32 | } 33 | span.parameter:after { 34 | content:":"; 35 | } 36 | span.types:before { 37 | content:"("; 38 | } 39 | span.types:after { 40 | content:")"; 41 | } 42 | .type { 43 | font-weight: bold; font-style:italic 44 | } 45 | 46 | p.name { 47 | font-family: "Andale Mono", monospace; 48 | } 49 | 50 | #navigation { 51 | float: left; 52 | background-color: white; 53 | border-right: 1px solid #d3dbec; 54 | border-bottom: 1px solid #d3dbec; 55 | 56 | width: 14em; 57 | vertical-align: top; 58 | overflow: visible; 59 | } 60 | 61 | #navigation br { 62 | display: none; 63 | } 64 | 65 | #navigation h1 { 66 | background-color: white; 67 | border-bottom: 1px solid #d3dbec; 68 | padding: 15px; 69 | margin-top: 0px; 70 | margin-bottom: 0px; 71 | } 72 | 73 | #navigation h2 { 74 | font-size: 18px; 75 | background-color: white; 76 | border-bottom: 1px solid #d3dbec; 77 | padding-left: 15px; 78 | padding-right: 15px; 79 | padding-top: 10px; 80 | padding-bottom: 10px; 81 | margin-top: 30px; 82 | margin-bottom: 0px; 83 | } 84 | 85 | #content h1 { 86 | background-color: #2c3e67; 87 | color: white; 88 | padding: 15px; 89 | margin: 0px; 90 | } 91 | 92 | #content h2 { 93 | background-color: #6c7ea7; 94 | color: white; 95 | padding: 15px; 96 | padding-top: 15px; 97 | padding-bottom: 15px; 98 | margin-top: 0px; 99 | } 100 | 101 | #content h2 a { 102 | background-color: #6c7ea7; 103 | color: white; 104 | text-decoration: none; 105 | } 106 | 107 | #content h2 a:hover { 108 | text-decoration: underline; 109 | } 110 | 111 | #content h3 { 112 | font-style: italic; 113 | padding-top: 15px; 114 | padding-bottom: 4px; 115 | margin-right: 15px; 116 | margin-left: 15px; 117 | margin-bottom: 5px; 118 | border-bottom: solid 1px #bcd; 119 | 
} 120 | 121 | #content h4 { 122 | margin-right: 15px; 123 | margin-left: 15px; 124 | border-bottom: solid 1px #bcd; 125 | } 126 | 127 | #content pre { 128 | margin: 15px; 129 | } 130 | 131 | pre { 132 | background-color: rgb(50, 55, 68); 133 | color: white; 134 | border-radius: 3px; 135 | /* border: 1px solid #C0C0C0; /* silver */ 136 | padding: 15px; 137 | overflow: auto; 138 | font-family: "Andale Mono", monospace; 139 | } 140 | 141 | #content ul pre.example { 142 | margin-left: 0px; 143 | } 144 | 145 | table.index { 146 | /* border: 1px #00007f; */ 147 | } 148 | table.index td { text-align: left; vertical-align: top; } 149 | 150 | #navigation ul 151 | { 152 | font-size:1em; 153 | list-style-type: none; 154 | margin: 1px 1px 10px 1px; 155 | padding-left: 20px; 156 | } 157 | 158 | #navigation li { 159 | text-indent: -1em; 160 | display: block; 161 | margin: 3px 0px 0px 22px; 162 | } 163 | 164 | #navigation li li a { 165 | margin: 0px 3px 0px -1em; 166 | } 167 | 168 | #content { 169 | margin-left: 14em; 170 | } 171 | 172 | #content p { 173 | padding-left: 15px; 174 | padding-right: 15px; 175 | } 176 | 177 | #content table { 178 | padding-left: 15px; 179 | padding-right: 15px; 180 | background-color: white; 181 | } 182 | 183 | #content p, #content table, #content ol, #content ul, #content dl { 184 | max-width: 900px; 185 | } 186 | 187 | #about { 188 | padding: 15px; 189 | padding-left: 16em; 190 | background-color: white; 191 | border-top: 1px solid #d3dbec; 192 | border-bottom: 1px solid #d3dbec; 193 | } 194 | 195 | table.module_list, table.function_list { 196 | border-width: 1px; 197 | border-style: solid; 198 | border-color: #cccccc; 199 | border-collapse: collapse; 200 | margin: 15px; 201 | } 202 | table.module_list td, table.function_list td { 203 | border-width: 1px; 204 | padding-left: 10px; 205 | padding-right: 10px; 206 | padding-top: 5px; 207 | padding-bottom: 5px; 208 | border: solid 1px rgb(193, 204, 228); 209 | } 210 | table.module_list td.name, table.function_list td.name { 211 | background-color: white; min-width: 200px; border-right-width: 0px; 212 | } 213 | table.module_list td.summary, table.function_list td.summary { 214 | background-color: white; width: 100%; border-left-width: 0px; 215 | } 216 | 217 | dl.function { 218 | margin-right: 15px; 219 | margin-left: 15px; 220 | border-bottom: solid 1px rgb(193, 204, 228); 221 | border-left: solid 1px rgb(193, 204, 228); 222 | border-right: solid 1px rgb(193, 204, 228); 223 | background-color: white; 224 | } 225 | 226 | dl.function dt { 227 | color: rgb(99, 123, 188); 228 | font-family: monospace; 229 | border-top: solid 1px rgb(193, 204, 228); 230 | padding: 15px; 231 | } 232 | 233 | dl.function dd { 234 | margin-left: 15px; 235 | margin-right: 15px; 236 | margin-top: 5px; 237 | margin-bottom: 15px; 238 | } 239 | 240 | #content dl.function dd h3 { 241 | margin-top: 0px; 242 | margin-left: 0px; 243 | padding-left: 0px; 244 | font-size: 16px; 245 | color: rgb(128, 128, 128); 246 | border-bottom: solid 1px #def; 247 | } 248 | 249 | #content dl.function dd ul, #content dl.function dd ol { 250 | padding: 0px; 251 | padding-left: 15px; 252 | list-style-type: none; 253 | } 254 | 255 | ul.nowrap { 256 | overflow:auto; 257 | white-space:nowrap; 258 | } 259 | 260 | .section-description { 261 | padding-left: 15px; 262 | padding-right: 15px; 263 | } 264 | 265 | /* stop sublists from having initial vertical space */ 266 | ul ul { margin-top: 0px; } 267 | ol ul { margin-top: 0px; } 268 | ol ol { margin-top: 0px; } 269 | ul ol { margin-top: 
0px; } 270 | 271 | /* make the target distinct; helps when we're navigating to a function */ 272 | a:target + * { 273 | background-color: #FF9; 274 | } 275 | 276 | 277 | /* styles for prettification of source */ 278 | pre .comment { color: #bbccaa; } 279 | pre .constant { color: #a8660d; } 280 | pre .escape { color: #844631; } 281 | pre .keyword { color: #ffc090; font-weight: bold; } 282 | pre .library { color: #0e7c6b; } 283 | pre .marker { color: #512b1e; background: #fedc56; font-weight: bold; } 284 | pre .string { color: #8080ff; } 285 | pre .number { color: #f8660d; } 286 | pre .operator { color: #2239a8; font-weight: bold; } 287 | pre .preprocessor, pre .prepro { color: #a33243; } 288 | pre .global { color: #c040c0; } 289 | pre .user-keyword { color: #800080; } 290 | pre .prompt { color: #558817; } 291 | pre .url { color: #272fc2; text-decoration: underline; } 292 | -------------------------------------------------------------------------------- /docs/modules/resty.healthcheck.utils.html: -------------------------------------------------------------------------------- 1 | 3 | 4 | 5 | 6 | Healthcheck library for OpenResty 7 | 8 | 9 | 10 | 11 |
Module resty.healthcheck.utils

Utilities used by the healthcheck library.

Code originally from https://github.com/Kong/lua-resty-dns-client

Info:
  - Copyright: 2017 Kong Inc.
  - License: Apache 2.0
  - Author: Hisham Muhammad, Thijs Schreijer

Functions:

  gctimer (t, cb, self, ...)
    A garbage-collectible timer implementation.
    Provides a timer that can be attached to an object, and GC'ed along
    with that object, as opposed to regular timers that keep running and
    prevent the object from being GC'ed.

    Parameters:
      t     time in ms
      cb    callback for the timer. The signature is function(premature, self, ...)
      self  the object the timer belongs to
      ...   any additional parameters to pass to the timer callback

    Returns:
      timer handle (same as ngx.timer.at)

generated by LDoc 1.4.6, last updated 2020-06-17 13:10:48
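The gctimer entry above gives only the signature, parameter list, and return value. The following is a minimal usage sketch under those assumptions: the checker table, the callback body, the interval value, and the "demo" tag are hypothetical, and the return-value handling simply mirrors ngx.timer.at as the docs state.

-- Hedged sketch only: assumes the module loads as "resty.healthcheck.utils"
-- and that gctimer follows the documented signature gctimer(t, cb, self, ...).
local utils = require "resty.healthcheck.utils"

-- Hypothetical object that "owns" the timer; when it is garbage-collected,
-- the timer is meant to be collected with it (the point of gctimer).
local checker = { name = "example-checker" }

local handle, err = utils.gctimer(
  1,                               -- t: time, as documented above
  function(premature, self, tag)   -- cb: signature function(premature, self, ...)
    if premature then
      return                       -- worker is shutting down
    end
    ngx.log(ngx.DEBUG, "gctimer fired for ", self.name, " (", tag, ")")
  end,
  checker,                         -- self: the object the timer belongs to
  "demo"                           -- ...: extra arguments passed to the callback
)

if not handle then
  ngx.log(ngx.ERR, "failed to create gctimer: ", err)
end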
126 | 127 | 128 | -------------------------------------------------------------------------------- /lua-resty-healthcheck-scm-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "scm-1" 3 | source = { 4 | url = "git://github.com/Kong/lua-resty-healthcheck", 5 | } 6 | description = { 7 | summary = "Healthchecks for OpenResty to check upstream service status", 8 | detailed = [[ 9 | lua-resty-healthcheck is a module that can check upstream service 10 | availability by sending requests and validating responses at timed 11 | intervals. 12 | ]], 13 | license = "Apache 2.0", 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 15 | } 16 | dependencies = { 17 | "penlight >= 1.9.2", 18 | "lua-resty-timer ~> 1", 19 | } 20 | build = { 21 | type = "builtin", 22 | modules = { 23 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.1.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.1.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.1.0.tar.gz", 5 | dir = "lua-resty-healthcheck-0.1.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events == 0.3.1", 19 | } 20 | build = { 21 | type = "builtin", 22 | modules = { 23 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 24 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.2.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.2.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.2.0.tar.gz", 5 | dir = "lua-resty-healthcheck-0.2.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 
13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events == 0.3.1", 19 | } 20 | build = { 21 | type = "builtin", 22 | modules = { 23 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 24 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.3.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.3.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.3.0.tar.gz", 5 | dir = "lua-resty-healthcheck-0.3.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events == 0.3.1", 19 | } 20 | build = { 21 | type = "builtin", 22 | modules = { 23 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 24 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.4.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.4.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.4.0.tar.gz", 5 | tag = "0.4.0", 6 | dir = "lua-resty-healthcheck-0.4.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events == 0.3.1" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.4.1-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.4.1-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.4.1.tar.gz", 5 | tag = "0.4.1", 6 | dir = "lua-resty-healthcheck-0.4.1" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 
14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.4.1-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.4.1-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.4.1.tar.gz", 5 | tag = "0.4.1", 6 | dir = "lua-resty-healthcheck-0.4.1" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.4.2-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.4.2-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.4.2.tar.gz", 5 | tag = "0.4.2", 6 | dir = "lua-resty-healthcheck-0.4.2" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.4.2-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.4.2-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.4.2.tar.gz", 5 | tag = "0.4.2", 6 | dir = "lua-resty-healthcheck-0.4.2" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 
14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.5.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.5.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.5.0.tar.gz", 5 | tag = "0.5.0", 6 | dir = "lua-resty-healthcheck-0.5.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.5.0-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.5.0-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.5.0.tar.gz", 5 | tag = "0.5.0", 6 | dir = "lua-resty-healthcheck-0.5.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.6.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.6.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.6.0.tar.gz", 5 | tag = "0.6.0", 6 | dir = "lua-resty-healthcheck-0.6.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 
14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.6.0-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.6.0-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.6.0.tar.gz", 5 | tag = "0.6.0", 6 | dir = "lua-resty-healthcheck-0.6.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.6.1-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.6.1-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.6.1.tar.gz", 5 | tag = "0.6.1", 6 | dir = "lua-resty-healthcheck-0.6.1" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-0.6.1-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "0.6.1-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/0.6.1.tar.gz", 5 | tag = "0.6.1", 6 | dir = "lua-resty-healthcheck-0.6.1" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 
14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.0.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.0.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.0.0.tar.gz", 5 | tag = "1.0.0", 6 | dir = "lua-resty-healthcheck-1.0.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.0.0-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.0.0-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.0.0.tar.gz", 5 | tag = "1.0.0", 6 | dir = "lua-resty-healthcheck-1.0.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.1.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.1.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.1.0.tar.gz", 5 | tag = "1.1.0", 6 | dir = "lua-resty-healthcheck-1.1.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 
14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.1.0-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.1.0-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.1.0.tar.gz", 5 | tag = "1.1.0", 6 | dir = "lua-resty-healthcheck-1.1.0" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.1.1-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.1.1-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.1.1.tar.gz", 5 | tag = "1.1.1", 6 | dir = "lua-resty-healthcheck-1.1.1" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.1.1-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.1.1-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.1.1.tar.gz", 5 | tag = "1.1.1", 6 | dir = "lua-resty-healthcheck-1.1.1" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 
14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.1.2-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.1.2-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.1.2.tar.gz", 5 | tag = "1.1.2", 6 | dir = "lua-resty-healthcheck-1.1.2" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2" 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.1.2-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.1.2-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.1.2.tar.gz", 5 | tag = "1.1.2", 6 | dir = "lua-resty-healthcheck-1.1.2" 7 | } 8 | description = { 9 | summary = "Healthchecks for OpenResty to check upstream service status", 10 | detailed = [[ 11 | lua-resty-healthcheck is a module that can check upstream service 12 | availability by sending requests and validating responses at timed 13 | intervals. 14 | ]], 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 16 | license = "Apache 2.0" 17 | } 18 | dependencies = { 19 | "lua-resty-worker-events >= 0.3.2, < 2.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.2.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.2.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.2.0.tar.gz", 5 | dir = "lua-resty-healthcheck-1.2.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 
13 | ]], 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 15 | license = "Apache 2.0" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events >= 0.3.2" 19 | } 20 | build = { 21 | type = "builtin", 22 | modules = { 23 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 24 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.2.0-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.2.0-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.2.0.tar.gz", 5 | dir = "lua-resty-healthcheck-1.2.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 15 | license = "Apache 2.0" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events >= 0.3.2, < 2.0", 19 | } 20 | build = { 21 | type = "builtin", 22 | modules = { 23 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 24 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.3.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.3.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.3.0.tar.gz", 5 | dir = "lua-resty-healthcheck-1.3.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 15 | license = "Apache 2.0" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events >= 0.3.2", 19 | "penlight >= 1.7.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.3.0-2.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.3.0-2" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.3.0.tar.gz", 5 | dir = "lua-resty-healthcheck-1.3.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 
13 | ]], 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 15 | license = "Apache 2.0" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events >= 0.3.2, < 2.0", 19 | "penlight >= 1.7.0", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | ["resty.healthcheck.utils"] = "lib/resty/healthcheck/utils.lua" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.4.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.4.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.4.0.tar.gz", 5 | dir = "lua-resty-healthcheck-1.4.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 15 | license = "Apache 2.0" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events == 1.0.0", 19 | "penlight >= 1.9.2", 20 | "lua-resty-timer ~> 1", 21 | } 22 | build = { 23 | type = "builtin", 24 | modules = { 25 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.4.1-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.4.1-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.4.1.tar.gz", 5 | dir = "lua-resty-healthcheck-1.4.1" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 15 | license = "Apache 2.0" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events == 1.0.0", 19 | "penlight >= 1.9.2", 20 | "lua-resty-timer ~> 1", 21 | } 22 | build = { 23 | type = "builtin", 24 | modules = { 25 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.4.2-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.4.2-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.4.2.tar.gz", 5 | dir = "lua-resty-healthcheck-1.4.2" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 
13 | ]], 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 15 | license = "Apache 2.0" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events == 1.0.0", 19 | "penlight >= 1.9.2", 20 | "lua-resty-timer ~> 1", 21 | } 22 | build = { 23 | type = "builtin", 24 | modules = { 25 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.5.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.5.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.5.0.tar.gz", 5 | dir = "lua-resty-healthcheck-1.5.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events == 1.0.0", 19 | "penlight >= 1.9.2", 20 | "lua-resty-timer ~> 1", 21 | } 22 | build = { 23 | type = "builtin", 24 | modules = { 25 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.5.1-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.5.1-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.5.1.tar.gz", 5 | dir = "lua-resty-healthcheck-1.5.1" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events == 1.0.0", 19 | "penlight >= 1.9.2", 20 | "lua-resty-timer ~> 1", 21 | } 22 | build = { 23 | type = "builtin", 24 | modules = { 25 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.6.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.6.0-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.6.0.tar.gz", 5 | dir = "lua-resty-healthcheck-1.6.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 
13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "penlight >= 1.9.2", 19 | "lua-resty-timer ~> 1", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.6.1-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.6.1-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.6.1.tar.gz", 5 | dir = "lua-resty-healthcheck-1.6.1" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "penlight >= 1.9.2", 19 | "lua-resty-timer ~> 1", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.6.2-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.6.2-1" 3 | source = { 4 | url = "https://github.com/Kong/lua-resty-healthcheck/archive/1.6.2.tar.gz", 5 | dir = "lua-resty-healthcheck-1.6.2" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "penlight >= 1.9.2", 19 | "lua-resty-timer ~> 1", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-1.6.3-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "1.6.3-1" 3 | source = { 4 | url = "git+https://github.com/Kong/lua-resty-healthcheck.git", 5 | tag = "1.6.3" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 
13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "penlight >= 1.9.2", 19 | "lua-resty-timer ~> 1", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-2.0.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "2.0.0-1" 3 | source = { 4 | url = "git://github.com/kong/lua-resty-healthcheck", 5 | tag = "2.0.0", 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | homepage = "https://github.com/Kong/lua-resty-healthcheck", 15 | license = "Apache 2.0" 16 | } 17 | dependencies = { 18 | "lua-resty-worker-events ~> 2", 19 | "penlight >= 1.7.0", 20 | "lua-resty-timer ~> 1", 21 | } 22 | build = { 23 | type = "builtin", 24 | modules = { 25 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-3.0.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "3.0.0-1" 3 | source = { 4 | url = "git+https://github.com/Kong/lua-resty-healthcheck.git", 5 | tag = "3.0.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "penlight >= 1.9.2", 19 | "lua-resty-timer ~> 1", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-3.0.1-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "3.0.1-1" 3 | source = { 4 | url = "git+https://github.com/Kong/lua-resty-healthcheck.git", 5 | tag = "3.0.1" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 
13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "penlight >= 1.9.2", 19 | "lua-resty-timer ~> 1", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-3.0.2-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "3.0.2-1" 3 | source = { 4 | url = "git+https://github.com/Kong/lua-resty-healthcheck.git", 5 | tag = "3.0.2" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "penlight >= 1.9.2", 19 | "lua-resty-timer ~> 1", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /rockspecs/lua-resty-healthcheck-3.1.0-1.rockspec: -------------------------------------------------------------------------------- 1 | package = "lua-resty-healthcheck" 2 | version = "3.1.0-1" 3 | source = { 4 | url = "git+https://github.com/Kong/lua-resty-healthcheck.git", 5 | tag = "3.1.0" 6 | } 7 | description = { 8 | summary = "Healthchecks for OpenResty to check upstream service status", 9 | detailed = [[ 10 | lua-resty-healthcheck is a module that can check upstream service 11 | availability by sending requests and validating responses at timed 12 | intervals. 
13 | ]], 14 | license = "Apache 2.0", 15 | homepage = "https://github.com/Kong/lua-resty-healthcheck" 16 | } 17 | dependencies = { 18 | "penlight >= 1.9.2", 19 | "lua-resty-timer ~> 1", 20 | } 21 | build = { 22 | type = "builtin", 23 | modules = { 24 | ["resty.healthcheck"] = "lib/resty/healthcheck.lua", 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /t/with_resty-events/00-new.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * (blocks() * 3) - 2; 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | server { 16 | server_name kong_worker_events; 17 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 18 | access_log off; 19 | location / { 20 | content_by_lua_block { 21 | require("resty.events.compat").run() 22 | } 23 | } 24 | } 25 | }; 26 | 27 | run_tests(); 28 | 29 | __DATA__ 30 | 31 | === TEST 1: new() requires worker_events to be configured 32 | --- http_config eval: $::HttpConfig 33 | --- config 34 | location = /t { 35 | content_by_lua_block { 36 | local healthcheck = require("resty.healthcheck") 37 | local ok, err = pcall(healthcheck.new, { 38 | events_module = "resty.events", 39 | }) 40 | ngx.log(ngx.ERR, err) 41 | } 42 | } 43 | --- request 44 | GET /t 45 | --- response_body 46 | 47 | --- error_log 48 | please configure 49 | 50 | === TEST 2: new() requires 'name' 51 | --- http_config eval: $::HttpConfig 52 | --- config 53 | location = /t { 54 | content_by_lua_block { 55 | local we = require "resty.events.compat" 56 | assert(we.configure({ unique_timeout = 5, broker_id = 0, listening = "unix:" .. ngx.config.prefix() .. "worker_events.sock" })) 57 | local healthcheck = require("resty.healthcheck") 58 | local ok, err = pcall(healthcheck.new, { 59 | events_module = "resty.events", 60 | shm_name = "test_shm", 61 | }) 62 | ngx.log(ngx.ERR, err) 63 | } 64 | } 65 | --- request 66 | GET /t 67 | --- response_body 68 | 69 | --- error_log 70 | required option 'name' is missing 71 | 72 | === TEST 3: new() fails with invalid shm 73 | --- http_config eval: $::HttpConfig 74 | --- config 75 | location = /t { 76 | content_by_lua_block { 77 | local we = require "resty.events.compat" 78 | assert(we.configure({ unique_timeout = 5, broker_id = 0, listening = "unix:" .. ngx.config.prefix() .. "worker_events.sock" })) 79 | local healthcheck = require("resty.healthcheck") 80 | local ok, err = pcall(healthcheck.new, { 81 | name = "testing", 82 | shm_name = "invalid_shm", 83 | events_module = "resty.events", 84 | }) 85 | ngx.log(ngx.ERR, err) 86 | } 87 | } 88 | --- request 89 | GET /t 90 | --- response_body 91 | 92 | --- error_log 93 | no shm found by name 94 | 95 | === TEST 4: new() initializes with default config 96 | --- http_config eval: $::HttpConfig 97 | --- config 98 | location = /t { 99 | content_by_lua_block { 100 | local we = require "resty.events.compat" 101 | assert(we.configure({ unique_timeout = 5, broker_id = 0, listening = "unix:" .. ngx.config.prefix() .. 
"worker_events.sock" })) 102 | local healthcheck = require("resty.healthcheck") 103 | local ok, err = pcall(healthcheck.new, { 104 | name = "testing", 105 | shm_name = "test_shm", 106 | events_module = "resty.events", 107 | }) 108 | } 109 | } 110 | --- request 111 | GET /t 112 | --- response_body 113 | 114 | --- error_log 115 | Healthchecker started! 116 | 117 | === TEST 5: new() only accepts http or tcp types 118 | --- http_config eval: $::HttpConfig 119 | --- config 120 | location = /t { 121 | content_by_lua_block { 122 | local we = require "resty.events.compat" 123 | assert(we.configure({ unique_timeout = 5, broker_id = 0, listening = "unix:" .. ngx.config.prefix() .. "worker_events.sock" })) 124 | local healthcheck = require("resty.healthcheck") 125 | local ok, err = pcall(healthcheck.new, { 126 | name = "testing", 127 | shm_name = "test_shm", 128 | events_module = "resty.events", 129 | type = "http", 130 | }) 131 | ngx.say(ok) 132 | local ok, err = pcall(healthcheck.new, { 133 | name = "testing", 134 | shm_name = "test_shm", 135 | events_module = "resty.events", 136 | type = "tcp", 137 | }) 138 | ngx.say(ok) 139 | local ok, err = pcall(healthcheck.new, { 140 | name = "testing", 141 | shm_name = "test_shm", 142 | events_module = "resty.events", 143 | type = "get lost", 144 | }) 145 | ngx.say(ok) 146 | } 147 | } 148 | --- request 149 | GET /t 150 | --- response_body 151 | true 152 | true 153 | false 154 | 155 | === TEST 6: new() deals with bad inputs 156 | --- http_config eval: $::HttpConfig 157 | --- config 158 | location = /t { 159 | content_by_lua_block { 160 | local we = require "resty.events.compat" 161 | assert(we.configure({ unique_timeout = 5, broker_id = 0, listening = "unix:" .. ngx.config.prefix() .. "worker_events.sock" })) 162 | local healthcheck = require("resty.healthcheck") 163 | 164 | -- tests for failure 165 | local tests = { 166 | { active = { timeout = -1 }}, 167 | { active = { timeout = 1e+42 }}, 168 | { active = { concurrency = -1 }}, 169 | { active = { concurrency = 1e42 }}, 170 | { active = { healthy = { interval = -1 }}}, 171 | { active = { healthy = { interval = 1e42 }}}, 172 | { active = { healthy = { successes = -1 }}}, 173 | { active = { healthy = { successes = 1e42 }}}, 174 | { active = { unhealthy = { interval = -1 }}}, 175 | { active = { unhealthy = { interval = 1e42 }}}, 176 | { active = { unhealthy = { tcp_failures = -1 }}}, 177 | { active = { unhealthy = { tcp_failures = 1e42 }}}, 178 | { active = { unhealthy = { timeouts = -1 }}}, 179 | { active = { unhealthy = { timeouts = 1e42 }}}, 180 | { active = { unhealthy = { http_failures = -1 }}}, 181 | { active = { unhealthy = { http_failures = 1e42 }}}, 182 | { passive = { healthy = { successes = -1 }}}, 183 | { passive = { healthy = { successes = 1e42 }}}, 184 | { passive = { unhealthy = { tcp_failures = -1 }}}, 185 | { passive = { unhealthy = { tcp_failures = 1e42 }}}, 186 | { passive = { unhealthy = { timeouts = -1 }}}, 187 | { passive = { unhealthy = { timeouts = 1e42 }}}, 188 | { passive = { unhealthy = { http_failures = -1 }}}, 189 | { passive = { unhealthy = { http_failures = 1e42 }}}, 190 | } 191 | for _, test in ipairs(tests) do 192 | local ok, err = pcall(healthcheck.new, { 193 | name = "testing", 194 | shm_name = "test_shm", 195 | events_module = "resty.events", 196 | type = "http", 197 | checks = test, 198 | }) 199 | ngx.say(ok) 200 | end 201 | } 202 | } 203 | --- request 204 | GET /t 205 | --- response_body 206 | false 207 | false 208 | false 209 | false 210 | false 211 | false 212 | false 213 
| false 214 | false 215 | false 216 | false 217 | false 218 | false 219 | false 220 | false 221 | false 222 | false 223 | false 224 | false 225 | false 226 | false 227 | false 228 | false 229 | false 230 | -------------------------------------------------------------------------------- /t/with_resty-events/01-start-stop.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * (blocks() * 3) + 1; 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | init_worker_by_lua_block { 16 | local we = require "resty.events.compat" 17 | assert(we.configure({ 18 | unique_timeout = 5, 19 | broker_id = 0, 20 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 21 | })) 22 | assert(we.configured()) 23 | } 24 | 25 | server { 26 | server_name kong_worker_events; 27 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 28 | access_log off; 29 | location / { 30 | content_by_lua_block { 31 | require("resty.events.compat").run() 32 | } 33 | } 34 | } 35 | }; 36 | 37 | run_tests(); 38 | 39 | __DATA__ 40 | 41 | === TEST 1: start() can start after stop() 42 | --- http_config eval: $::HttpConfig 43 | --- config 44 | location = /t { 45 | content_by_lua_block { 46 | local healthcheck = require("resty.healthcheck") 47 | local checker = healthcheck.new({ 48 | name = "testing", 49 | shm_name = "test_shm", 50 | events_module = "resty.events", 51 | checks = { 52 | active = { 53 | healthy = { 54 | interval = 0.1 55 | }, 56 | unhealthy = { 57 | interval = 0.1 58 | } 59 | } 60 | } 61 | }) 62 | local ok, err = checker:stop() 63 | ngx.sleep(0.2) -- wait twice the interval 64 | local ok, err = checker:start() 65 | ngx.say(ok) 66 | } 67 | } 68 | --- request 69 | GET /t 70 | --- response_body 71 | true 72 | --- no_error_log 73 | [error] 74 | 75 | 76 | === TEST 3: start() is a no-op if active intervals are 0 77 | --- http_config eval: $::HttpConfig 78 | --- config 79 | location = /t { 80 | content_by_lua_block { 81 | local healthcheck = require("resty.healthcheck") 82 | local checker = healthcheck.new({ 83 | name = "testing", 84 | shm_name = "test_shm", 85 | events_module = "resty.events", 86 | checks = { 87 | active = { 88 | healthy = { 89 | interval = 0 90 | }, 91 | unhealthy = { 92 | interval = 0 93 | } 94 | } 95 | } 96 | }) 97 | local ok, err = checker:start() 98 | ngx.say(ok) 99 | local ok, err = checker:start() 100 | ngx.say(ok) 101 | local ok, err = checker:start() 102 | ngx.say(ok) 103 | } 104 | } 105 | --- request 106 | GET /t 107 | --- response_body 108 | true 109 | true 110 | true 111 | --- no_error_log 112 | [error] 113 | 114 | === TEST 4: stop() stops health checks 115 | --- http_config eval: $::HttpConfig 116 | --- config 117 | location = /t { 118 | content_by_lua_block { 119 | local healthcheck = require("resty.healthcheck") 120 | local checker = healthcheck.new({ 121 | name = "testing", 122 | shm_name = "test_shm", 123 | events_module = "resty.events", 124 | checks = { 125 | active = { 126 | healthy = { 127 | interval = 0.1 128 | }, 129 | unhealthy = { 130 | interval = 0.1 131 | } 132 | } 133 | } 134 | }) 135 | local ok, err = checker:stop() 136 | ngx.say(ok) 137 | } 138 | } 139 | --- request 140 | GET /t 141 | --- response_body 142 | true 143 | --- no_error_log 144 | [error] 145 | checking 146 | 147 | === TEST 5: start() 
restarts health checks 148 | --- http_config eval: $::HttpConfig 149 | --- config 150 | location = /t { 151 | content_by_lua_block { 152 | local healthcheck = require("resty.healthcheck") 153 | local checker = healthcheck.new({ 154 | name = "testing", 155 | shm_name = "test_shm", 156 | events_module = "resty.events", 157 | checks = { 158 | active = { 159 | healthy = { 160 | interval = 0.1 161 | }, 162 | unhealthy = { 163 | interval = 0.1 164 | } 165 | } 166 | } 167 | }) 168 | local ok, err = checker:stop() 169 | ngx.say(ok) 170 | ngx.sleep(1) -- active healthchecks might take up to 1s to start 171 | local ok, err = checker:start() 172 | ngx.say(ok) 173 | ngx.sleep(0.2) -- wait twice the interval 174 | } 175 | } 176 | --- request 177 | GET /t 178 | --- response_body 179 | true 180 | true 181 | --- error_log 182 | checking 183 | -------------------------------------------------------------------------------- /t/with_resty-events/02-add_target.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * (blocks() * 4) + 3; 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | init_worker_by_lua_block { 16 | local we = require "resty.events.compat" 17 | assert(we.configure({ 18 | unique_timeout = 5, 19 | broker_id = 0, 20 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 21 | })) 22 | assert(we.configured()) 23 | } 24 | 25 | server { 26 | server_name kong_worker_events; 27 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 28 | access_log off; 29 | location / { 30 | content_by_lua_block { 31 | require("resty.events.compat").run() 32 | } 33 | } 34 | } 35 | }; 36 | 37 | run_tests(); 38 | 39 | __DATA__ 40 | 41 | === TEST 1: add_target() adds an unhealthy target 42 | --- http_config eval: $::HttpConfig 43 | --- config 44 | location = /t { 45 | content_by_lua_block { 46 | local healthcheck = require("resty.healthcheck") 47 | local checker = healthcheck.new({ 48 | name = "testing", 49 | shm_name = "test_shm", 50 | events_module = "resty.events", 51 | checks = { 52 | active = { 53 | healthy = { 54 | interval = 0.1 55 | }, 56 | unhealthy = { 57 | interval = 0.1 58 | } 59 | } 60 | } 61 | }) 62 | ngx.sleep(1) -- active healthchecks might take up to 1s to start 63 | local ok, err = checker:add_target("127.0.0.1", 11111, nil, false) 64 | ngx.say(ok) 65 | ngx.sleep(0.5) 66 | } 67 | } 68 | --- request 69 | GET /t 70 | --- response_body 71 | true 72 | --- error_log 73 | checking healthy targets: nothing to do 74 | checking unhealthy targets: #1 75 | 76 | --- no_error_log 77 | checking healthy targets: #1 78 | 79 | 80 | 81 | === TEST 2: add_target() adds a healthy target 82 | --- http_config eval 83 | qq{ 84 | $::HttpConfig 85 | 86 | server { 87 | listen 2112; 88 | location = /status { 89 | return 200; 90 | } 91 | } 92 | } 93 | --- config 94 | location = /t { 95 | content_by_lua_block { 96 | local healthcheck = require("resty.healthcheck") 97 | local checker = healthcheck.new({ 98 | name = "testing", 99 | shm_name = "test_shm", 100 | events_module = "resty.events", 101 | checks = { 102 | active = { 103 | http_path = "/status", 104 | healthy = { 105 | interval = 0.1 106 | }, 107 | unhealthy = { 108 | interval = 0.1 109 | } 110 | } 111 | } 112 | }) 113 | ngx.sleep(1) -- active healthchecks might take up to 1s to start 
114 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 115 | ngx.say(ok) 116 | ngx.sleep(0.2) -- wait twice the interval 117 | } 118 | } 119 | --- request 120 | GET /t 121 | --- response_body 122 | true 123 | --- error_log 124 | checking unhealthy targets: nothing to do 125 | checking healthy targets: #1 126 | 127 | --- no_error_log 128 | checking unhealthy targets: #1 129 | 130 | 131 | 132 | === TEST 3: calling add_target() repeatedly does not change status 133 | --- http_config eval 134 | qq{ 135 | $::HttpConfig 136 | 137 | server { 138 | listen 2113; 139 | location = /status { 140 | return 200; 141 | } 142 | } 143 | } 144 | --- config 145 | location = /t { 146 | content_by_lua_block { 147 | local healthcheck = require("resty.healthcheck") 148 | local checker = healthcheck.new({ 149 | name = "testing", 150 | shm_name = "test_shm", 151 | events_module = "resty.events", 152 | checks = { 153 | active = { 154 | http_path = "/status", 155 | healthy = { 156 | interval = 0.1, 157 | successes = 1, 158 | }, 159 | unhealthy = { 160 | interval = 0.1, 161 | tcp_failures = 1, 162 | http_failures = 1, 163 | } 164 | } 165 | } 166 | }) 167 | ngx.sleep(1) -- active healthchecks might take up to 1s to start 168 | local ok, err = checker:add_target("127.0.0.1", 2113, nil, true) 169 | local ok, err = checker:add_target("127.0.0.1", 2113, nil, false) 170 | ngx.say(ok) 171 | ngx.sleep(0.2) -- wait twice the interval 172 | } 173 | } 174 | --- request 175 | GET /t 176 | --- response_body 177 | true 178 | --- error_log 179 | checking unhealthy targets: nothing to do 180 | checking healthy targets: #1 181 | 182 | --- no_error_log 183 | checking unhealthy targets: #1 184 | -------------------------------------------------------------------------------- /t/with_resty-events/03-get_target_status.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * (blocks() * 4); 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | init_worker_by_lua_block { 16 | local we = require "resty.events.compat" 17 | assert(we.configure({ 18 | unique_timeout = 5, 19 | broker_id = 0, 20 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 21 | })) 22 | assert(we.configured()) 23 | } 24 | 25 | server { 26 | server_name kong_worker_events; 27 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 28 | access_log off; 29 | location / { 30 | content_by_lua_block { 31 | require("resty.events.compat").run() 32 | } 33 | } 34 | } 35 | }; 36 | 37 | run_tests(); 38 | 39 | __DATA__ 40 | 41 | === TEST 1: get_target_status() reports proper status 42 | --- http_config eval 43 | qq{ 44 | $::HttpConfig 45 | 46 | server { 47 | listen 2115; 48 | location = /status { 49 | return 200; 50 | } 51 | } 52 | } 53 | --- config 54 | location = /t { 55 | content_by_lua_block { 56 | local healthcheck = require("resty.healthcheck") 57 | local checker = healthcheck.new({ 58 | name = "testing", 59 | shm_name = "test_shm", 60 | events_module = "resty.events", 61 | checks = { 62 | active = { 63 | http_path = "/status", 64 | healthy = { 65 | interval = 999, -- we don't want active checks 66 | successes = 1, 67 | }, 68 | unhealthy = { 69 | interval = 999, -- we don't want active checks 70 | tcp_failures = 1, 71 | http_failures = 1, 72 | } 73 | }, 74 | passive = { 75 | 
healthy = { 76 | successes = 1, 77 | }, 78 | unhealthy = { 79 | tcp_failures = 1, 80 | http_failures = 1, 81 | } 82 | } 83 | } 84 | }) 85 | local ok, err = checker:add_target("127.0.0.1", 2115, nil, true) 86 | ngx.sleep(0.01) 87 | ngx.say(checker:get_target_status("127.0.0.1", 2115)) -- true 88 | 89 | checker:report_tcp_failure("127.0.0.1", 2115) 90 | ngx.sleep(0.01) 91 | ngx.say(checker:get_target_status("127.0.0.1", 2115)) -- false 92 | 93 | checker:report_success("127.0.0.1", 2115) 94 | ngx.sleep(0.01) 95 | ngx.say(checker:get_target_status("127.0.0.1", 2115)) -- true 96 | } 97 | } 98 | --- request 99 | GET /t 100 | --- response_body 101 | true 102 | false 103 | true 104 | --- no_error_log 105 | checking healthy targets: #1 106 | checking unhealthy targets: #1 107 | -------------------------------------------------------------------------------- /t/with_resty-events/08-report_timeout.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * 16; 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | init_worker_by_lua_block { 16 | local we = require "resty.events.compat" 17 | assert(we.configure({ 18 | unique_timeout = 5, 19 | broker_id = 0, 20 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 21 | })) 22 | assert(we.configured()) 23 | } 24 | 25 | server { 26 | server_name kong_worker_events; 27 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 28 | access_log off; 29 | location / { 30 | content_by_lua_block { 31 | require("resty.events.compat").run() 32 | } 33 | } 34 | } 35 | }; 36 | 37 | run_tests(); 38 | 39 | __DATA__ 40 | 41 | 42 | 43 | === TEST 1: report_timeout() active + passive 44 | --- http_config eval 45 | qq{ 46 | $::HttpConfig 47 | 48 | server { 49 | listen 2122; 50 | location = /status { 51 | return 200; 52 | } 53 | } 54 | } 55 | --- config 56 | location = /t { 57 | content_by_lua_block { 58 | local healthcheck = require("resty.healthcheck") 59 | local checker = healthcheck.new({ 60 | name = "testing", 61 | shm_name = "test_shm", 62 | events_module = "resty.events", 63 | type = "http", 64 | checks = { 65 | active = { 66 | http_path = "/status", 67 | healthy = { 68 | interval = 999, -- we don't want active checks 69 | successes = 3, 70 | }, 71 | unhealthy = { 72 | interval = 999, -- we don't want active checks 73 | tcp_failures = 3, 74 | http_failures = 5, 75 | timeouts = 2, 76 | } 77 | }, 78 | passive = { 79 | healthy = { 80 | successes = 3, 81 | }, 82 | unhealthy = { 83 | tcp_failures = 3, 84 | http_failures = 5, 85 | timeouts = 2, 86 | } 87 | } 88 | } 89 | }) 90 | local ok, err = checker:add_target("127.0.0.1", 2122, nil, true) 91 | local ok, err = checker:add_target("127.0.0.1", 2113, nil, true) 92 | ngx.sleep(0.01) 93 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 94 | checker:report_timeout("127.0.0.1", 2113, nil, "passive") 95 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 96 | checker:report_timeout("127.0.0.1", 2113, nil, "passive") 97 | ngx.sleep(0.01) 98 | ngx.say(checker:get_target_status("127.0.0.1", 2122)) -- false 99 | ngx.say(checker:get_target_status("127.0.0.1", 2113)) -- false 100 | } 101 | } 102 | --- request 103 | GET /t 104 | --- response_body 105 | false 106 | false 107 | --- error_log 108 | unhealthy TIMEOUT increment (1/2) for 
'(127.0.0.1:2122)' 109 | unhealthy TIMEOUT increment (2/2) for '(127.0.0.1:2122)' 110 | event: target status '(127.0.0.1:2122)' from 'true' to 'false' 111 | unhealthy TIMEOUT increment (1/2) for '(127.0.0.1:2113)' 112 | unhealthy TIMEOUT increment (2/2) for '(127.0.0.1:2113)' 113 | event: target status '(127.0.0.1:2113)' from 'true' to 'false' 114 | 115 | 116 | === TEST 2: report_timeout() for active is a nop when active.unhealthy.timeouts == 0 117 | --- http_config eval 118 | qq{ 119 | $::HttpConfig 120 | 121 | server { 122 | listen 2122; 123 | location = /status { 124 | return 200; 125 | } 126 | } 127 | } 128 | --- config 129 | location = /t { 130 | content_by_lua_block { 131 | local healthcheck = require("resty.healthcheck") 132 | local checker = healthcheck.new({ 133 | name = "testing", 134 | shm_name = "test_shm", 135 | events_module = "resty.events", 136 | type = "http", 137 | checks = { 138 | active = { 139 | http_path = "/status", 140 | healthy = { 141 | interval = 999, -- we don't want active checks 142 | successes = 3, 143 | }, 144 | unhealthy = { 145 | interval = 999, -- we don't want active checks 146 | tcp_failures = 3, 147 | http_failures = 5, 148 | timeouts = 0, 149 | } 150 | }, 151 | passive = { 152 | healthy = { 153 | successes = 3, 154 | }, 155 | unhealthy = { 156 | tcp_failures = 3, 157 | http_failures = 5, 158 | timeouts = 2, 159 | } 160 | } 161 | } 162 | }) 163 | local ok, err = checker:add_target("127.0.0.1", 2122, nil, true) 164 | ngx.sleep(0.01) 165 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 166 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 167 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 168 | ngx.sleep(0.01) 169 | ngx.say(checker:get_target_status("127.0.0.1", 2122)) -- true 170 | } 171 | } 172 | --- request 173 | GET /t 174 | --- response_body 175 | true 176 | --- no_error_log 177 | unhealthy TCP increment 178 | event: target status '(127.0.0.1:2122)' from 'true' to 'false' 179 | 180 | 181 | 182 | === TEST 3: report_timeout() for passive is a nop when passive.unhealthy.timeouts == 0 183 | --- http_config eval 184 | qq{ 185 | $::HttpConfig 186 | 187 | server { 188 | listen 2122; 189 | location = /status { 190 | return 200; 191 | } 192 | } 193 | } 194 | --- config 195 | location = /t { 196 | content_by_lua_block { 197 | local healthcheck = require("resty.healthcheck") 198 | local checker = healthcheck.new({ 199 | name = "testing", 200 | shm_name = "test_shm", 201 | events_module = "resty.events", 202 | type = "http", 203 | checks = { 204 | active = { 205 | http_path = "/status", 206 | healthy = { 207 | interval = 999, -- we don't want active checks 208 | successes = 3, 209 | }, 210 | unhealthy = { 211 | interval = 999, -- we don't want active checks 212 | tcp_failures = 3, 213 | http_failures = 5, 214 | timeouts = 2, 215 | } 216 | }, 217 | passive = { 218 | healthy = { 219 | successes = 3, 220 | }, 221 | unhealthy = { 222 | tcp_failures = 3, 223 | http_failures = 5, 224 | timeouts = 0, 225 | } 226 | } 227 | } 228 | }) 229 | local ok, err = checker:add_target("127.0.0.1", 2122, nil, true) 230 | ngx.sleep(0.01) 231 | checker:report_timeout("127.0.0.1", 2122, nil, "passive") 232 | checker:report_timeout("127.0.0.1", 2122, nil, "passive") 233 | checker:report_timeout("127.0.0.1", 2122, nil, "passive") 234 | ngx.sleep(0.01) 235 | ngx.say(checker:get_target_status("127.0.0.1", 2122)) -- true 236 | } 237 | } 238 | --- request 239 | GET /t 240 | --- response_body 241 | true 242 | --- no_error_log 243 | unhealthy TCP increment 244 
| event: target status '(127.0.0.1:2122)' from 'true' to 'false' 245 | -------------------------------------------------------------------------------- /t/with_resty-events/10-garbagecollect.t_disabled: -------------------------------------------------------------------------------- 1 | # This test is disabled 2 | # 3 | # We need to understand if it is a false-positive or lua-resty-healthcheck is 4 | # actually leaking the event module memory when deleting a checker instance. 5 | # 6 | # Please rename this test if a solution is found or remove it if it is a 7 | # false-positive. 8 | 9 | use Test::Nginx::Socket::Lua; 10 | use Cwd qw(cwd); 11 | 12 | workers(1); 13 | 14 | plan tests => repeat_each() * (blocks() * 3); 15 | 16 | my $pwd = cwd(); 17 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 18 | 19 | our $HttpConfig = qq{ 20 | lua_package_path "$pwd/lib/?.lua;;"; 21 | lua_shared_dict test_shm 8m; 22 | 23 | init_worker_by_lua_block { 24 | local we = require "resty.events.compat" 25 | assert(we.configure({ 26 | unique_timeout = 5, 27 | broker_id = 0, 28 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 29 | })) 30 | assert(we.configured()) 31 | } 32 | 33 | server { 34 | server_name kong_worker_events; 35 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 36 | access_log off; 37 | location / { 38 | content_by_lua_block { 39 | require("resty.events.compat").run() 40 | } 41 | } 42 | } 43 | }; 44 | 45 | run_tests(); 46 | 47 | __DATA__ 48 | 49 | 50 | 51 | === TEST 1: garbage collect the checker object 52 | --- http_config eval 53 | qq{ 54 | $::HttpConfig 55 | 56 | server { 57 | listen 2121; 58 | location = /status { 59 | return 200; 60 | } 61 | } 62 | } 63 | --- config 64 | location = /t { 65 | content_by_lua_block { 66 | local dump = function(...) 
ngx.log(ngx.DEBUG,"\027[31m\n", require("pl.pretty").write({...}),"\027[0m") end 67 | local healthcheck = require("resty.healthcheck") 68 | local checker = healthcheck.new({ 69 | name = "testing", 70 | shm_name = "test_shm", 71 | events_module = "resty.events", 72 | type = "http", 73 | checks = { 74 | active = { 75 | http_path = "/status", 76 | healthy = { 77 | interval = 0.1, 78 | successes = 3, 79 | }, 80 | unhealthy = { 81 | interval = 0.1, 82 | http_failures = 3, 83 | } 84 | }, 85 | } 86 | }) 87 | assert(checker:add_target("127.0.0.1", 2121, nil, true)) 88 | local weak_table = setmetatable({ checker },{ 89 | __mode = "v", 90 | }) 91 | checker = nil -- now only anchored in weak table above 92 | collectgarbage() 93 | collectgarbage() 94 | collectgarbage() 95 | collectgarbage() 96 | ngx.sleep(0.5) -- leave room for timers to run (they shouldn't, but we want to be sure) 97 | ngx.say(#weak_table) -- after GC, should be 0 length 98 | } 99 | } 100 | --- request 101 | GET /t 102 | --- response_body 103 | 0 104 | --- no_error_log 105 | checking healthy targets: #1 106 | -------------------------------------------------------------------------------- /t/with_resty-events/12-set_target_status.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * blocks() * 2; 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | init_worker_by_lua_block { 16 | local we = require "resty.events.compat" 17 | assert(we.configure({ 18 | unique_timeout = 5, 19 | broker_id = 0, 20 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 21 | })) 22 | assert(we.configured()) 23 | } 24 | 25 | server { 26 | server_name kong_worker_events; 27 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 28 | access_log off; 29 | location / { 30 | content_by_lua_block { 31 | require("resty.events.compat").run() 32 | } 33 | } 34 | } 35 | }; 36 | 37 | run_tests(); 38 | 39 | __DATA__ 40 | 41 | === TEST 1: set_target_status() updates a status 42 | --- http_config eval 43 | qq{ 44 | $::HttpConfig 45 | } 46 | --- config 47 | location = /t { 48 | content_by_lua_block { 49 | local healthcheck = require("resty.healthcheck") 50 | local checker = healthcheck.new({ 51 | name = "testing", 52 | shm_name = "test_shm", 53 | events_module = "resty.events", 54 | }) 55 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 56 | ngx.sleep(0.01) 57 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 58 | checker:set_target_status("127.0.0.1", 2112, nil, false) 59 | ngx.sleep(0.01) 60 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 61 | checker:set_target_status("127.0.0.1", 2112, nil, true) 62 | ngx.sleep(0.01) 63 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 64 | } 65 | } 66 | --- request 67 | GET /t 68 | --- response_body 69 | true 70 | false 71 | true 72 | 73 | 74 | === TEST 2: set_target_status() restores node after passive check disables it 75 | --- http_config eval 76 | qq{ 77 | $::HttpConfig 78 | } 79 | --- config 80 | location = /t { 81 | content_by_lua_block { 82 | local healthcheck = require("resty.healthcheck") 83 | local checker = healthcheck.new({ 84 | name = "testing", 85 | shm_name = "test_shm", 86 | events_module = "resty.events", 87 | checks = { 88 | passive = { 89 | unhealthy = { 90 | 
tcp_failures = 2, 91 | http_failures = 2, 92 | } 93 | } 94 | } 95 | }) 96 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 97 | ngx.sleep(0.01) 98 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 99 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 100 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 101 | ngx.sleep(0.01) 102 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 103 | checker:set_target_status("127.0.0.1", 2112, nil, true) 104 | ngx.sleep(0.01) 105 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 106 | } 107 | } 108 | --- request 109 | GET /t 110 | --- response_body 111 | true 112 | false 113 | true 114 | 115 | 116 | === TEST 3: set_target_status() resets the failure counters 117 | --- http_config eval 118 | qq{ 119 | $::HttpConfig 120 | } 121 | --- config 122 | location = /t { 123 | content_by_lua_block { 124 | local healthcheck = require("resty.healthcheck") 125 | local checker = healthcheck.new({ 126 | name = "testing", 127 | shm_name = "test_shm", 128 | events_module = "resty.events", 129 | checks = { 130 | passive = { 131 | healthy = { 132 | successes = 2, 133 | }, 134 | unhealthy = { 135 | tcp_failures = 2, 136 | http_failures = 2, 137 | } 138 | } 139 | } 140 | }) 141 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 142 | ngx.sleep(0.01) 143 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 144 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 145 | checker:set_target_status("127.0.0.1", 2112, nil, true) 146 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 147 | ngx.sleep(0.01) 148 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 149 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 150 | ngx.sleep(0.01) 151 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 152 | } 153 | } 154 | --- request 155 | GET /t 156 | --- response_body 157 | true 158 | true 159 | false 160 | 161 | 162 | === TEST 3: set_target_status() resets the success counters 163 | --- http_config eval 164 | qq{ 165 | $::HttpConfig 166 | } 167 | --- config 168 | location = /t { 169 | content_by_lua_block { 170 | local healthcheck = require("resty.healthcheck") 171 | local checker = healthcheck.new({ 172 | name = "testing", 173 | shm_name = "test_shm", 174 | events_module = "resty.events", 175 | checks = { 176 | passive = { 177 | healthy = { 178 | successes = 2, 179 | }, 180 | unhealthy = { 181 | tcp_failures = 2, 182 | http_failures = 2, 183 | } 184 | } 185 | } 186 | }) 187 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 188 | ngx.sleep(0.01) 189 | checker:set_target_status("127.0.0.1", 2112, nil, false) 190 | ngx.sleep(0.01) 191 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 192 | checker:report_http_status("127.0.0.1", 2112, nil, 200) 193 | checker:set_target_status("127.0.0.1", 2112, nil, false) 194 | checker:report_http_status("127.0.0.1", 2112, nil, 200) 195 | ngx.sleep(0.01) 196 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 197 | checker:report_http_status("127.0.0.1", 2112, nil, 200) 198 | ngx.sleep(0.01) 199 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 200 | } 201 | } 202 | --- request 203 | GET /t 204 | --- response_body 205 | false 206 | false 207 | true 208 | -------------------------------------------------------------------------------- /t/with_resty-events/13-integration.t_disabled: 
-------------------------------------------------------------------------------- 1 | # This test is disabled 2 | # 3 | # All the test steps used here take longer than the request timeout because of 4 | # all the ngx.sleep needed to synchronize the events. Running them individually 5 | # seems to work, so the solution is to split the integration test into smaller 6 | # tests. 7 | 8 | use Test::Nginx::Socket::Lua; 9 | use Cwd qw(cwd); 10 | 11 | workers(1); 12 | 13 | plan tests => repeat_each() * 2; 14 | 15 | my $pwd = cwd(); 16 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 17 | 18 | our $HttpConfig = qq{ 19 | lua_package_path "$pwd/lib/?.lua;;"; 20 | lua_shared_dict test_shm 8m; 21 | 22 | init_worker_by_lua_block { 23 | local we = require "resty.events.compat" 24 | assert(we.configure({ 25 | unique_timeout = 5, 26 | broker_id = 0, 27 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 28 | })) 29 | assert(we.configured()) 30 | } 31 | 32 | server { 33 | server_name kong_worker_events; 34 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 35 | access_log off; 36 | location / { 37 | content_by_lua_block { 38 | require("resty.events.compat").run() 39 | } 40 | } 41 | } 42 | }; 43 | 44 | run_tests(); 45 | 46 | __DATA__ 47 | 48 | 49 | 50 | === TEST 1: ensure counters work properly 51 | --- http_config eval 52 | qq{ 53 | $::HttpConfig 54 | } 55 | --- config eval 56 | qq{ 57 | location = /t { 58 | content_by_lua_block { 59 | local host = "127.0.0.1" 60 | local port = 2112 61 | 62 | local healthcheck = require("resty.healthcheck") 63 | local checker = healthcheck.new({ 64 | test = true, 65 | name = "testing", 66 | shm_name = "test_shm", 67 | events_module = "resty.events", 68 | type = "http", 69 | checks = { 70 | active = { 71 | http_path = "/status", 72 | healthy = { 73 | interval = 0, 74 | successes = 4, 75 | }, 76 | unhealthy = { 77 | interval = 0, 78 | tcp_failures = 2, 79 | http_failures = 0, 80 | } 81 | }, 82 | passive = { 83 | healthy = { 84 | successes = 2, 85 | }, 86 | unhealthy = { 87 | tcp_failures = 2, 88 | http_failures = 2, 89 | timeouts = 2, 90 | } 91 | } 92 | } 93 | }) 94 | 95 | local ok, err = checker:add_target(host, port, nil, true) 96 | 97 | -- S = successes counter 98 | -- F = http_failures counter 99 | -- T = tcp_failures counter 100 | -- O = timeouts counter 101 | 102 | local cases = {} 103 | 104 | local function incr(idxs, i, max) 105 | idxs[i] = idxs[i] + 1 106 | if idxs[i] > max and i > 1 then 107 | idxs[i] = 1 108 | incr(idxs, i - 1, max) 109 | end 110 | end 111 | 112 | local function add_cases(cases, len, m) 113 | local idxs = {} 114 | for i = 1, len do 115 | idxs[i] = 1 116 | end 117 | local word = {} 118 | for _ = 1, (#m) ^ len do 119 | for c = 1, len do 120 | word[c] = m[idxs[c]] 121 | end 122 | table.insert(cases, table.concat(word)) 123 | incr(idxs, len, #m) 124 | end 125 | end 126 | 127 | local m = { "S", "F", "T", "O" } 128 | 129 | -- There are 324 (3*3*3*3*4) possible internal states 130 | -- to the above healthcheck configuration where all limits are set to 2. 131 | -- We need at least five events (4*4*4*4) to be able 132 | -- to exercise all of them 133 | for i = 1, 5 do 134 | add_cases(cases, i, m) 135 | end 136 | 137 | -- Brute-force test all combinations of health events up to 5 events 138 | -- and compares the results given by the library with a simple simulation 139 | -- that implements the specified behavior.
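-- [editor's note: the comment lines below are an annotation added for clarity and are
-- not part of the original test file] A worked example of the enumeration above:
-- add_cases() appends every word of length `len` over m = { "S", "F", "T", "O" },
-- so the `for i = 1, 5` loop collects 4 + 16 + 64 + 256 + 1024 = 1364 event
-- sequences ("S", "F", "T", "O", "SS", "SF", ..., "OOOOO"). For instance:
--   local t = {}
--   add_cases(t, 2, { "S", "F", "T", "O" })
--   -- now #t == 16, with t[1] == "SS", t[2] == "SF" and t[5] == "FS"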
140 | local function run_test_case(case) 141 | assert(checker:set_target_status(host, port, nil, true)) 142 | ngx.sleep(0.002) 143 | local i = 1 144 | local s, f, t, o = 0, 0, 0, 0 145 | local mode = true 146 | for c in case:gmatch(".") do 147 | if c == "S" then 148 | checker:report_http_status(host, port, nil, 200, "passive") 149 | ngx.sleep(0.002) 150 | s = s + 1 151 | f, t, o = 0, 0, 0 152 | if s == 2 then 153 | mode = true 154 | end 155 | elseif c == "F" then 156 | checker:report_http_status(host, port, nil, 500, "passive") 157 | ngx.sleep(0.002) 158 | f = f + 1 159 | s = 0 160 | if f == 2 then 161 | mode = false 162 | end 163 | elseif c == "T" then 164 | checker:report_tcp_failure(host, port, nil, "read", "passive") 165 | ngx.sleep(0.002) 166 | t = t + 1 167 | s = 0 168 | if t == 2 then 169 | mode = false 170 | end 171 | elseif c == "O" then 172 | checker:report_timeout(host, port, nil, "passive") 173 | ngx.sleep(0.002) 174 | o = o + 1 175 | s = 0 176 | if o == 2 then 177 | mode = false 178 | end 179 | end 180 | 181 | --local ctr, state = checker:test_get_counter(host, port, nil) 182 | --ngx.say(case, ": ", c, " ", string.format("%08x", ctr), " ", state) 183 | --ngx.log(ngx.DEBUG, case, ": ", c, " ", string.format("%08x", ctr), " ", state) 184 | 185 | if checker:get_target_status(host, port, nil) ~= mode then 186 | ngx.say("failed: ", case, " step ", i, " expected ", mode) 187 | return false 188 | end 189 | i = i + 1 190 | end 191 | return true 192 | end 193 | 194 | for _, case in ipairs(cases) do 195 | ngx.log(ngx.ERR, "Case: ", case) 196 | run_test_case(case) 197 | end 198 | ngx.say("all ok!") 199 | } 200 | } 201 | } 202 | --- request 203 | GET /t 204 | --- response_body 205 | all ok! 206 | --- error_log 207 | --- no_error_log 208 | -------------------------------------------------------------------------------- /t/with_resty-events/14-tls_active_probes.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => blocks() * 2; 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | server { 16 | server_name kong_worker_events; 17 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 18 | access_log off; 19 | location / { 20 | content_by_lua_block { 21 | require("resty.events.compat").run() 22 | } 23 | } 24 | } 25 | }; 26 | 27 | run_tests(); 28 | 29 | __DATA__ 30 | 31 | 32 | 33 | === TEST 1: active probes, valid https 34 | --- http_config eval: $::HttpConfig 35 | --- config 36 | location = /t { 37 | lua_ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt; 38 | lua_ssl_verify_depth 2; 39 | content_by_lua_block { 40 | local we = require "resty.events.compat" 41 | assert(we.configure({ unique_timeout = 5, broker_id = 0, listening = "unix:" .. ngx.config.prefix() .. 
"worker_events.sock" })) 42 | local healthcheck = require("resty.healthcheck") 43 | local checker = healthcheck.new({ 44 | name = "testing", 45 | shm_name = "test_shm", 46 | events_module = "resty.events", 47 | checks = { 48 | active = { 49 | timeout = 2, 50 | type = "https", 51 | http_path = "/", 52 | healthy = { 53 | interval = 2, 54 | successes = 2, 55 | }, 56 | unhealthy = { 57 | interval = 2, 58 | tcp_failures = 2, 59 | } 60 | }, 61 | } 62 | }) 63 | local ok, err = checker:add_target("104.154.89.105", 443, "badssl.com", false) 64 | ngx.sleep(16) -- wait for 4x the check interval 65 | ngx.say(checker:get_target_status("104.154.89.105", 443, "badssl.com")) -- true 66 | } 67 | } 68 | --- request 69 | GET /t 70 | --- response_body 71 | true 72 | --- timeout 73 | 20 74 | 75 | === TEST 2: active probes, invalid cert 76 | --- http_config eval: $::HttpConfig 77 | --- config 78 | location = /t { 79 | lua_ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt; 80 | lua_ssl_verify_depth 2; 81 | content_by_lua_block { 82 | local we = require "resty.events.compat" 83 | assert(we.configure({ unique_timeout = 5, broker_id = 0, listening = "unix:" .. ngx.config.prefix() .. "worker_events.sock" })) 84 | local healthcheck = require("resty.healthcheck") 85 | local checker = healthcheck.new({ 86 | name = "testing", 87 | shm_name = "test_shm", 88 | events_module = "resty.events", 89 | checks = { 90 | active = { 91 | timeout = 2, 92 | type = "https", 93 | http_path = "/", 94 | healthy = { 95 | interval = 2, 96 | successes = 2, 97 | }, 98 | unhealthy = { 99 | interval = 2, 100 | tcp_failures = 2, 101 | } 102 | }, 103 | } 104 | }) 105 | local ok, err = checker:add_target("104.154.89.105", 443, "wrong.host.badssl.com", true) 106 | ngx.sleep(16) -- wait for 4x the check interval 107 | ngx.say(checker:get_target_status("104.154.89.105", 443, "wrong.host.badssl.com")) -- false 108 | } 109 | } 110 | --- request 111 | GET /t 112 | --- response_body 113 | false 114 | --- timeout 115 | 20 116 | 117 | === TEST 3: active probes, accept invalid cert when disabling check 118 | --- http_config eval: $::HttpConfig 119 | --- config 120 | location = /t { 121 | lua_ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt; 122 | lua_ssl_verify_depth 2; 123 | content_by_lua_block { 124 | local we = require "resty.events.compat" 125 | assert(we.configure({ unique_timeout = 5, broker_id = 0, listening = "unix:" .. ngx.config.prefix() .. 
"worker_events.sock" })) 126 | local healthcheck = require("resty.healthcheck") 127 | local checker = healthcheck.new({ 128 | name = "testing", 129 | shm_name = "test_shm", 130 | events_module = "resty.events", 131 | checks = { 132 | active = { 133 | timeout = 2, 134 | type = "https", 135 | https_verify_certificate = false, 136 | http_path = "/", 137 | healthy = { 138 | interval = 2, 139 | successes = 2, 140 | }, 141 | unhealthy = { 142 | interval = 2, 143 | tcp_failures = 2, 144 | } 145 | }, 146 | } 147 | }) 148 | local ok, err = checker:add_target("104.154.89.105", 443, "wrong.host.badssl.com", false) 149 | ngx.sleep(16) -- wait for 4x the check interval 150 | ngx.say(checker:get_target_status("104.154.89.105", 443, "wrong.host.badssl.com")) -- true 151 | } 152 | } 153 | --- request 154 | GET /t 155 | --- response_body 156 | true 157 | --- timeout 158 | 20 159 | -------------------------------------------------------------------------------- /t/with_resty-events/17-mtls.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * 4; 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | init_worker_by_lua_block { 16 | local we = require "resty.events.compat" 17 | assert(we.configure({ 18 | unique_timeout = 5, 19 | broker_id = 0, 20 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 21 | })) 22 | assert(we.configured()) 23 | } 24 | 25 | server { 26 | server_name kong_worker_events; 27 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 28 | access_log off; 29 | location / { 30 | content_by_lua_block { 31 | require("resty.events.compat").run() 32 | } 33 | } 34 | } 35 | }; 36 | 37 | run_tests(); 38 | 39 | __DATA__ 40 | 41 | === TEST 1: configure a MTLS probe 42 | --- http_config eval 43 | qq{ 44 | $::HttpConfig 45 | } 46 | --- config 47 | location = /t { 48 | content_by_lua_block { 49 | local pl_file = require "pl.file" 50 | local cert = pl_file.read("t/with_resty-events/util/cert.pem", true) 51 | local key = pl_file.read("t/with_resty-events/util/key.pem", true) 52 | 53 | local healthcheck = require("resty.healthcheck") 54 | local checker = healthcheck.new({ 55 | name = "testing_mtls", 56 | shm_name = "test_shm", 57 | events_module = "resty.events", 58 | type = "http", 59 | ssl_cert = cert, 60 | ssl_key = key, 61 | checks = { 62 | active = { 63 | http_path = "/status", 64 | healthy = { 65 | interval = 999, -- we don't want active checks 66 | successes = 3, 67 | }, 68 | unhealthy = { 69 | interval = 999, -- we don't want active checks 70 | tcp_failures = 3, 71 | http_failures = 3, 72 | } 73 | }, 74 | passive = { 75 | healthy = { 76 | successes = 3, 77 | }, 78 | unhealthy = { 79 | tcp_failures = 3, 80 | http_failures = 3, 81 | } 82 | } 83 | } 84 | }) 85 | ngx.say(checker ~= nil) -- true 86 | } 87 | } 88 | --- request 89 | GET /t 90 | --- response_body 91 | true 92 | 93 | 94 | === TEST 2: configure a MTLS probe with parsed cert/key 95 | --- http_config eval 96 | qq{ 97 | $::HttpConfig 98 | } 99 | --- config 100 | location = /t { 101 | content_by_lua_block { 102 | local pl_file = require "pl.file" 103 | local ssl = require "ngx.ssl" 104 | local cert = ssl.parse_pem_cert(pl_file.read("t/with_resty-events/util/cert.pem", true)) 105 | local key = 
ssl.parse_pem_priv_key(pl_file.read("t/with_resty-events/util/key.pem", true)) 106 | 107 | local healthcheck = require("resty.healthcheck") 108 | local checker = healthcheck.new({ 109 | name = "testing_mtls", 110 | shm_name = "test_shm", 111 | events_module = "resty.events", 112 | type = "http", 113 | ssl_cert = cert, 114 | ssl_key = key, 115 | checks = { 116 | active = { 117 | http_path = "/status", 118 | healthy = { 119 | interval = 999, -- we don't want active checks 120 | successes = 3, 121 | }, 122 | unhealthy = { 123 | interval = 999, -- we don't want active checks 124 | tcp_failures = 3, 125 | http_failures = 3, 126 | } 127 | }, 128 | passive = { 129 | healthy = { 130 | successes = 3, 131 | }, 132 | unhealthy = { 133 | tcp_failures = 3, 134 | http_failures = 3, 135 | } 136 | } 137 | } 138 | }) 139 | ngx.say(checker ~= nil) -- true 140 | } 141 | } 142 | --- request 143 | GET /t 144 | --- response_body 145 | true 146 | -------------------------------------------------------------------------------- /t/with_resty-events/18-req-headers.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua 'no_plan'; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | my $pwd = cwd(); 7 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 8 | 9 | our $HttpConfig = qq{ 10 | lua_package_path "$pwd/lib/?.lua;;"; 11 | lua_shared_dict test_shm 8m; 12 | 13 | init_worker_by_lua_block { 14 | local we = require "resty.events.compat" 15 | assert(we.configure({ 16 | unique_timeout = 5, 17 | broker_id = 0, 18 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 19 | })) 20 | assert(we.configured()) 21 | } 22 | 23 | server { 24 | server_name kong_worker_events; 25 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 26 | access_log off; 27 | location / { 28 | content_by_lua_block { 29 | require("resty.events.compat").run() 30 | } 31 | } 32 | } 33 | }; 34 | 35 | run_tests(); 36 | 37 | __DATA__ 38 | 39 | === TEST 1: headers: {"User-Agent: curl/7.29.0"} 40 | --- http_config eval 41 | qq{ 42 | $::HttpConfig 43 | 44 | server { 45 | listen 2112; 46 | location = /status { 47 | return 200; 48 | } 49 | } 50 | } 51 | --- config 52 | location = /t { 53 | content_by_lua_block { 54 | local healthcheck = require("resty.healthcheck") 55 | local checker = healthcheck.new({ 56 | name = "testing", 57 | shm_name = "test_shm", 58 | events_module = "resty.events", 59 | checks = { 60 | active = { 61 | http_path = "/status", 62 | healthy = { 63 | interval = 0.1 64 | }, 65 | headers = {"User-Agent: curl/7.29.0"} 66 | } 67 | } 68 | }) 69 | ngx.sleep(0.2) -- wait twice the interval 70 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 71 | ngx.say(ok) 72 | ngx.sleep(0.2) -- wait twice the interval 73 | } 74 | } 75 | --- request 76 | GET /t 77 | --- response_body 78 | true 79 | --- error_log 80 | checking healthy targets: nothing to do 81 | checking healthy targets: #1 82 | GET /status HTTP/1.0 83 | User-Agent: curl/7.29.0 84 | Host: 127.0.0.1 85 | 86 | 87 | 88 | === TEST 2: headers: {"User-Agent: curl"} 89 | --- http_config eval 90 | qq{ 91 | $::HttpConfig 92 | 93 | server { 94 | listen 2112; 95 | location = /status { 96 | return 200; 97 | } 98 | } 99 | } 100 | --- config 101 | location = /t { 102 | content_by_lua_block { 103 | local healthcheck = require("resty.healthcheck") 104 | local checker = healthcheck.new({ 105 | name = "testing", 106 | shm_name = "test_shm", 107 | events_module = "resty.events", 108 | checks = { 109 | active = { 110 | http_path = 
"/status", 111 | healthy = { 112 | interval = 0.1 113 | }, 114 | headers = {"User-Agent: curl"} 115 | } 116 | } 117 | }) 118 | ngx.sleep(0.2) -- wait twice the interval 119 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 120 | ngx.say(ok) 121 | ngx.sleep(0.2) -- wait twice the interval 122 | } 123 | } 124 | --- request 125 | GET /t 126 | --- response_body 127 | true 128 | --- error_log 129 | checking healthy targets: nothing to do 130 | checking healthy targets: #1 131 | GET /status HTTP/1.0 132 | User-Agent: curl 133 | Host: 127.0.0.1 134 | 135 | 136 | === TEST 3: headers: { ["User-Agent"] = "curl" } 137 | --- http_config eval 138 | qq{ 139 | $::HttpConfig 140 | 141 | server { 142 | listen 2112; 143 | location = /status { 144 | return 200; 145 | } 146 | } 147 | } 148 | --- config 149 | location = /t { 150 | content_by_lua_block { 151 | local healthcheck = require("resty.healthcheck") 152 | local checker = healthcheck.new({ 153 | name = "testing", 154 | shm_name = "test_shm", 155 | events_module = "resty.events", 156 | checks = { 157 | active = { 158 | http_path = "/status", 159 | healthy = { 160 | interval = 0.1 161 | }, 162 | headers = { ["User-Agent"] = "curl" } 163 | } 164 | } 165 | }) 166 | ngx.sleep(0.2) -- wait twice the interval 167 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 168 | ngx.say(ok) 169 | ngx.sleep(0.2) -- wait twice the interval 170 | } 171 | } 172 | --- request 173 | GET /t 174 | --- response_body 175 | true 176 | --- error_log 177 | checking healthy targets: nothing to do 178 | checking healthy targets: #1 179 | GET /status HTTP/1.0 180 | User-Agent: curl 181 | Host: 127.0.0.1 182 | 183 | 184 | 185 | === TEST 4: headers: { ["User-Agent"] = {"curl"} } 186 | --- http_config eval 187 | qq{ 188 | $::HttpConfig 189 | 190 | server { 191 | listen 2112; 192 | location = /status { 193 | return 200; 194 | } 195 | } 196 | } 197 | --- config 198 | location = /t { 199 | content_by_lua_block { 200 | local healthcheck = require("resty.healthcheck") 201 | local checker = healthcheck.new({ 202 | name = "testing", 203 | shm_name = "test_shm", 204 | events_module = "resty.events", 205 | checks = { 206 | active = { 207 | http_path = "/status", 208 | healthy = { 209 | interval = 0.1 210 | }, 211 | headers = { ["User-Agent"] = {"curl"} } 212 | } 213 | } 214 | }) 215 | ngx.sleep(0.2) -- wait twice the interval 216 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 217 | ngx.say(ok) 218 | ngx.sleep(0.2) -- wait twice the interval 219 | } 220 | } 221 | --- request 222 | GET /t 223 | --- response_body 224 | true 225 | --- error_log 226 | checking healthy targets: nothing to do 227 | checking healthy targets: #1 228 | GET /status HTTP/1.0 229 | User-Agent: curl 230 | Host: 127.0.0.1 231 | 232 | 233 | 234 | === TEST 5: headers: { ["User-Agent"] = {"curl", "nginx"} } 235 | --- http_config eval 236 | qq{ 237 | $::HttpConfig 238 | 239 | server { 240 | listen 2112; 241 | location = /status { 242 | return 200; 243 | } 244 | } 245 | } 246 | --- config 247 | location = /t { 248 | content_by_lua_block { 249 | local healthcheck = require("resty.healthcheck") 250 | local checker = healthcheck.new({ 251 | name = "testing", 252 | shm_name = "test_shm", 253 | events_module = "resty.events", 254 | checks = { 255 | active = { 256 | http_path = "/status", 257 | healthy = { 258 | interval = 0.1 259 | }, 260 | headers = { ["User-Agent"] = {"curl", "nginx"} } 261 | } 262 | } 263 | }) 264 | ngx.sleep(0.2) -- wait twice the interval 265 | local ok, err = 
checker:add_target("127.0.0.1", 2112, nil, true) 266 | ngx.say(ok) 267 | ngx.sleep(0.2) -- wait twice the interval 268 | } 269 | } 270 | --- request 271 | GET /t 272 | --- response_body 273 | true 274 | --- error_log 275 | checking healthy targets: nothing to do 276 | checking healthy targets: #1 277 | GET /status HTTP/1.0 278 | User-Agent: curl 279 | User-Agent: nginx 280 | Host: 127.0.0.1 281 | -------------------------------------------------------------------------------- /t/with_resty-events/19-timer.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * blocks() * 2; 7 | 8 | my $pwd = cwd(); 9 | $ENV{TEST_NGINX_SERVROOT} = server_root(); 10 | 11 | our $HttpConfig = qq{ 12 | lua_package_path "$pwd/lib/?.lua;;"; 13 | lua_shared_dict test_shm 8m; 14 | 15 | init_worker_by_lua_block { 16 | local we = require "resty.events.compat" 17 | assert(we.configure({ 18 | unique_timeout = 5, 19 | broker_id = 0, 20 | listening = "unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock" 21 | })) 22 | assert(we.configured()) 23 | } 24 | 25 | server { 26 | server_name kong_worker_events; 27 | listen unix:$ENV{TEST_NGINX_SERVROOT}/worker_events.sock; 28 | access_log off; 29 | location / { 30 | content_by_lua_block { 31 | require("resty.events.compat").run() 32 | } 33 | } 34 | } 35 | }; 36 | 37 | run_tests(); 38 | 39 | __DATA__ 40 | 41 | 42 | 43 | 44 | === TEST 1: active probes, http node failing 45 | --- http_config eval 46 | qq{ 47 | $::HttpConfig 48 | 49 | server { 50 | listen 2130; 51 | location = /status { 52 | content_by_lua_block { 53 | ngx.sleep(2) 54 | ngx.exit(500); 55 | } 56 | } 57 | } 58 | } 59 | --- config 60 | location = /t { 61 | content_by_lua_block { 62 | local healthcheck = require("resty.healthcheck") 63 | local checker = healthcheck.new({ 64 | name = "testing", 65 | shm_name = "test_shm", 66 | events_module = "resty.events", 67 | type = "http", 68 | checks = { 69 | active = { 70 | timeout = 1, 71 | http_path = "/status", 72 | healthy = { 73 | interval = 0.1, 74 | successes = 3, 75 | }, 76 | unhealthy = { 77 | interval = 0.1, 78 | http_failures = 3, 79 | } 80 | }, 81 | } 82 | }) 83 | local ok, err = checker:add_target("127.0.0.1", 2130, nil, true) 84 | ngx.sleep(3) -- wait for some time to let the checks run 85 | -- There should be no more than 3 timers running atm, but 86 | -- add a few spaces for worker events 87 | ngx.say(tonumber(ngx.timer.running_count()) <= 5) 88 | } 89 | } 90 | --- request 91 | GET /t 92 | --- response_body 93 | true 94 | -------------------------------------------------------------------------------- /t/with_resty-events/util/cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDCTCCAfGgAwIBAgIUWWntedJ1yLAJE2baK/Mg06osmGAwDQYJKoZIhvcNAQEL 3 | BQAwFDESMBAGA1UECgwJS29uZyBJbmMuMB4XDTIwMDQyMzIwMjcwMFoXDTMwMDQy 4 | MTIwMjcwMFowFDESMBAGA1UECgwJS29uZyBJbmMuMIIBIjANBgkqhkiG9w0BAQEF 5 | AAOCAQ8AMIIBCgKCAQEAvVBrEH34MzwKlkBapiNyXr9huSShuojy+7i/01BSFng3 6 | 1TiejXJ3pEjykZqt7ENkZ6+BTYUdb9klK221yXiSyX71x97O0WHHuhH/m4XwGiIH 7 | YPBHdg+ExdMRflXgwtlW3of2hTWxkPkPQDPhoSQVMc5DkU7EOgrTxkv1rUWVAed4 8 | gSK4IT2AkhKwOSkewZANj2bnK5Evf71ACyJd7IQbJAIYoKBwRJAUXJMA7XAreIB+ 9 | nEr9whNYTklhB4aEa2wtOQuiQubIMJzdOryEX5nufH+tL4p1QKhRPFAqqtJ2Czgw 10 | YZY/v9IrThl19r0nL7FIvxFDNIMeOamJxDLQqsh9NwIDAQABo1MwUTAdBgNVHQ4E 11 | FgQU9t6YAdQ5mOXeqvptN5l3yYZGibEwHwYDVR0jBBgwFoAU9t6YAdQ5mOXeqvpt 12 
| N5l3yYZGibEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAhi83 13 | aXsfJGqr9Zb1guWxbI8uKoG6o88ptXjV2c6dJnxXag0A/Rj+bX2bcPkN2kvQksNl 14 | MBUQlniOydZfsBUAoC0V7yyGUv9eO2RIeFnnNpRXNu+n+Kg2bvgvu8BKNNNOASZv 15 | +Vmzvo9lbfhS9MNAxYk9eTiPNUZ3zn2RfFyT6YWWJbRjk//EAlchyud3XGug9/hw 16 | c05dtzWEYT8GdzMd+Y1/2kR5r/CapSj7GEqL5T3+zDIfjbhTokV7WBrw6og2avoZ 17 | vzrF8xWucry5/2mKQbRxMyCtKYUKTcoLzF4HrNQCETm0n9qUODrHER7Wit9fQFZX 18 | 1GEA3BkX2tsbIVVaig== 19 | -----END CERTIFICATE----- 20 | -------------------------------------------------------------------------------- /t/with_resty-events/util/key.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC9UGsQffgzPAqW 3 | QFqmI3Jev2G5JKG6iPL7uL/TUFIWeDfVOJ6NcnekSPKRmq3sQ2Rnr4FNhR1v2SUr 4 | bbXJeJLJfvXH3s7RYce6Ef+bhfAaIgdg8Ed2D4TF0xF+VeDC2Vbeh/aFNbGQ+Q9A 5 | M+GhJBUxzkORTsQ6CtPGS/WtRZUB53iBIrghPYCSErA5KR7BkA2PZucrkS9/vUAL 6 | Il3shBskAhigoHBEkBRckwDtcCt4gH6cSv3CE1hOSWEHhoRrbC05C6JC5sgwnN06 7 | vIRfme58f60vinVAqFE8UCqq0nYLODBhlj+/0itOGXX2vScvsUi/EUM0gx45qYnE 8 | MtCqyH03AgMBAAECggEAA1hWa/Yt2onnDfyZHXJm5PGwwlq5WNhuorADA7LZoHgD 9 | VIspkgpBvu9jCduX0yLltUdOm5YMjRtjIr9PhP3SaikKIrv3H5AAvXLv90mIko2j 10 | X70fJiDkEbLHDlpqHEdG16vDWVs3hf5AnLvN8tD2ZujkHL8tjHEAiPJyptsh5OSw 11 | XaltCD67U940XXJ89x0zFZ/3RoRk78wX3ELz7/dY0cMnslMavON+LYTq9hQZyVmm 12 | nOhZICWerKjax4t5f9PZ/zM6IhEVrUhw2WrC31tgRo+ITCIA/nkKid8vNhkiLVdw 13 | jTyAYDLgYW7K8/zVrzmV9TOr3CaZHLQxnF/LMpIEAQKBgQDjnA/G4g2mDD7lsqU1 14 | N3it87v2VBnZPFNW6L17Qig+2BDTXg1kadFBlp8qtEJI+H5axVSmzsrlmATJVhUK 15 | iYOQwiEsQnt4tGmWZI268NAIUtv0TX0i9yscsezmvGABMcyBCF7ZwFhUfhy0pn1t 16 | kzmbYN4AjYdcisCnSusoMD92NwKBgQDU7YVNuieMIZCIuSxG61N1+ZyX3Ul5l6KU 17 | m1xw1PZvugqXnQlOLV/4Iaz86Vvlt2aDqTWO/iv4LU7ixNdhRtxFIU/b2a8DzDOw 18 | ijhzMGRJqJOdi1NfciiIWHyrjRmGbhCgm784vqV7qbQomiIsjgnDvjoZkossZMiJ 19 | 63vs7huxAQKBgQDiQjT8w6JFuk6cD+Zi7G2unmfvCtNXO7ys3Fffu3g+YJL5SrmN 20 | ZBN8W7qFvQNXfo48tYTc/Rx8941qh4QLIYAD2rcXRE9xQgbkVbj+aHykiZnVVWJb 21 | 69CTidux0vist1BPxH5lf+tOsr7eZdKxpnTRnI2Thx1URSoWI0d4f93WKQKBgBXn 22 | kW0bl3HtCgdmtU1ebCmY0ik1VJezp8AN84aQAgIga3KJbymhtVu7ayZhg1iwc1Vc 23 | FOxu7WsMji75/QY+2e4qrSJ61GxZl3+z2HbRJaAGPZlZeew5vD26jKjBTTztGbzM 24 | CPH3euKr5KLAqH9Y5VxDt4pl7vdULuUxWoBXRnYBAoGAHIFMYiCdXETtrFHKVTzc 25 | vm4P24PnsNHoDTGMXPeRYRKF2+3VEJrwp1Q3fue4Go4zFB8I6nhNVIbh4dIHxFab 26 | hyxZvGWGUgRvTvD4VYn/YHVoSf2/xNZ0r/S2LKomp+jwoWKfukbCoDjAOWvnK5iD 27 | o41Tn0yhzBdnrYguKznGR3g= 28 | -----END PRIVATE KEY----- 29 | -------------------------------------------------------------------------------- /t/with_resty-events/util/reindex: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env lua 2 | 3 | if not arg[1] then 4 | io.stderr:write("Usage: "..arg[0].." t/*.t\n") 5 | os.exit(1) 6 | end 7 | 8 | for _, name in ipairs(arg) do 9 | local i = 1 10 | local fd = io.open(name, "r") 11 | if fd then 12 | local new = name.."~" 13 | local out = io.open(new, "w") 14 | for line in fd:lines() do 15 | local test, n, desc = line:match("^(===%s*TEST%s*)(%d+)(.*)$") 16 | if test then 17 | out:write(test .. tostring(i) .. desc .. "\n") 18 | i = i + 1 19 | else 20 | out:write(line .. "\n") 21 | end 22 | end 23 | out:close() 24 | fd:close() 25 | os.execute("mv " .. new .. " " .. 
name) 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /t/with_worker-events/01-start-stop.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * (blocks() * 3) + 1; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | === TEST 1: start() can start after stop() 21 | --- http_config eval: $::HttpConfig 22 | --- config 23 | location = /t { 24 | content_by_lua_block { 25 | local we = require "resty.worker.events" 26 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 27 | local healthcheck = require("resty.healthcheck") 28 | local checker = healthcheck.new({ 29 | name = "testing", 30 | shm_name = "test_shm", 31 | checks = { 32 | active = { 33 | healthy = { 34 | interval = 0.1 35 | }, 36 | unhealthy = { 37 | interval = 0.1 38 | } 39 | } 40 | } 41 | }) 42 | local ok, err = checker:stop() 43 | ngx.sleep(0.2) -- wait twice the interval 44 | local ok, err = checker:start() 45 | ngx.say(ok) 46 | } 47 | } 48 | --- request 49 | GET /t 50 | --- response_body 51 | true 52 | --- no_error_log 53 | [error] 54 | 55 | 56 | === TEST 3: start() is a no-op if active intervals are 0 57 | --- http_config eval: $::HttpConfig 58 | --- config 59 | location = /t { 60 | content_by_lua_block { 61 | local we = require "resty.worker.events" 62 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 63 | local healthcheck = require("resty.healthcheck") 64 | local checker = healthcheck.new({ 65 | name = "testing", 66 | shm_name = "test_shm", 67 | checks = { 68 | active = { 69 | healthy = { 70 | interval = 0 71 | }, 72 | unhealthy = { 73 | interval = 0 74 | } 75 | } 76 | } 77 | }) 78 | local ok, err = checker:start() 79 | ngx.say(ok) 80 | local ok, err = checker:start() 81 | ngx.say(ok) 82 | local ok, err = checker:start() 83 | ngx.say(ok) 84 | } 85 | } 86 | --- request 87 | GET /t 88 | --- response_body 89 | true 90 | true 91 | true 92 | --- no_error_log 93 | [error] 94 | 95 | === TEST 4: stop() stops health checks 96 | --- http_config eval: $::HttpConfig 97 | --- config 98 | location = /t { 99 | content_by_lua_block { 100 | local we = require "resty.worker.events" 101 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 102 | local healthcheck = require("resty.healthcheck") 103 | local checker = healthcheck.new({ 104 | name = "testing", 105 | shm_name = "test_shm", 106 | checks = { 107 | active = { 108 | healthy = { 109 | interval = 0.1 110 | }, 111 | unhealthy = { 112 | interval = 0.1 113 | } 114 | } 115 | } 116 | }) 117 | local ok, err = checker:stop() 118 | ngx.say(ok) 119 | } 120 | } 121 | --- request 122 | GET /t 123 | --- response_body 124 | true 125 | --- no_error_log 126 | [error] 127 | checking 128 | 129 | === TEST 5: start() restarts health checks 130 | --- http_config eval: $::HttpConfig 131 | --- config 132 | location = /t { 133 | content_by_lua_block { 134 | local we = require "resty.worker.events" 135 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 136 | local healthcheck = require("resty.healthcheck") 137 | local checker = healthcheck.new({ 138 | name = "testing", 139 | shm_name = "test_shm", 140 | checks = { 141 | active = { 142 | healthy = { 143 | interval = 0.1 144 | }, 145 
| unhealthy = { 146 | interval = 0.1 147 | } 148 | } 149 | } 150 | }) 151 | local ok, err = checker:stop() 152 | ngx.say(ok) 153 | ngx.sleep(1) -- active healthchecks might take up to 1s to start 154 | local ok, err = checker:start() 155 | ngx.say(ok) 156 | ngx.sleep(0.2) -- wait twice the interval 157 | } 158 | } 159 | --- request 160 | GET /t 161 | --- response_body 162 | true 163 | true 164 | --- error_log 165 | checking 166 | -------------------------------------------------------------------------------- /t/with_worker-events/02-add_target.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * (blocks() * 4) + 3; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | === TEST 1: add_target() adds an unhealthy target 21 | --- http_config eval: $::HttpConfig 22 | --- config 23 | location = /t { 24 | content_by_lua_block { 25 | local we = require "resty.worker.events" 26 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 27 | local healthcheck = require("resty.healthcheck") 28 | local checker = healthcheck.new({ 29 | name = "testing", 30 | shm_name = "test_shm", 31 | checks = { 32 | active = { 33 | healthy = { 34 | interval = 0.1 35 | }, 36 | unhealthy = { 37 | interval = 0.1 38 | } 39 | } 40 | } 41 | }) 42 | ngx.sleep(1) -- active healthchecks might take up to 1s to start 43 | local ok, err = checker:add_target("127.0.0.1", 11111, nil, false) 44 | ngx.say(ok) 45 | ngx.sleep(0.5) 46 | } 47 | } 48 | --- request 49 | GET /t 50 | --- response_body 51 | true 52 | --- error_log 53 | checking healthy targets: nothing to do 54 | checking unhealthy targets: #1 55 | 56 | --- no_error_log 57 | checking healthy targets: #1 58 | 59 | 60 | 61 | === TEST 2: add_target() adds a healthy target 62 | --- http_config eval 63 | qq{ 64 | $::HttpConfig 65 | 66 | server { 67 | listen 2112; 68 | location = /status { 69 | return 200; 70 | } 71 | } 72 | } 73 | --- config 74 | location = /t { 75 | content_by_lua_block { 76 | local we = require "resty.worker.events" 77 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 78 | local healthcheck = require("resty.healthcheck") 79 | local checker = healthcheck.new({ 80 | name = "testing", 81 | shm_name = "test_shm", 82 | checks = { 83 | active = { 84 | http_path = "/status", 85 | healthy = { 86 | interval = 0.1 87 | }, 88 | unhealthy = { 89 | interval = 0.1 90 | } 91 | } 92 | } 93 | }) 94 | ngx.sleep(1) -- active healthchecks might take up to 1s to start 95 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 96 | ngx.say(ok) 97 | ngx.sleep(0.2) -- wait twice the interval 98 | } 99 | } 100 | --- request 101 | GET /t 102 | --- response_body 103 | true 104 | --- error_log 105 | checking unhealthy targets: nothing to do 106 | checking healthy targets: #1 107 | 108 | --- no_error_log 109 | checking unhealthy targets: #1 110 | 111 | 112 | 113 | === TEST 3: calling add_target() repeatedly does not change status 114 | --- http_config eval 115 | qq{ 116 | $::HttpConfig 117 | 118 | server { 119 | listen 2113; 120 | location = /status { 121 | return 200; 122 | } 123 | } 124 | } 125 | --- config 126 | location = /t { 127 | content_by_lua_block { 128 | local we = require "resty.worker.events" 129 | assert(we.configure{ shm 
= "my_worker_events", interval = 0.1 }) 130 | local healthcheck = require("resty.healthcheck") 131 | local checker = healthcheck.new({ 132 | name = "testing", 133 | shm_name = "test_shm", 134 | checks = { 135 | active = { 136 | http_path = "/status", 137 | healthy = { 138 | interval = 0.1, 139 | successes = 1, 140 | }, 141 | unhealthy = { 142 | interval = 0.1, 143 | tcp_failures = 1, 144 | http_failures = 1, 145 | } 146 | } 147 | } 148 | }) 149 | ngx.sleep(1) -- active healthchecks might take up to 1s to start 150 | local ok, err = checker:add_target("127.0.0.1", 2113, nil, true) 151 | local ok, err = checker:add_target("127.0.0.1", 2113, nil, false) 152 | ngx.say(ok) 153 | ngx.sleep(0.2) -- wait twice the interval 154 | } 155 | } 156 | --- request 157 | GET /t 158 | --- response_body 159 | true 160 | --- error_log 161 | checking unhealthy targets: nothing to do 162 | checking healthy targets: #1 163 | 164 | --- no_error_log 165 | checking unhealthy targets: #1 166 | -------------------------------------------------------------------------------- /t/with_worker-events/03-get_target_status.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * (blocks() * 4); 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | === TEST 1: get_target_status() reports proper status 21 | --- http_config eval 22 | qq{ 23 | $::HttpConfig 24 | 25 | server { 26 | listen 2115; 27 | location = /status { 28 | return 200; 29 | } 30 | } 31 | } 32 | --- config 33 | location = /t { 34 | content_by_lua_block { 35 | local we = require "resty.worker.events" 36 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 37 | local healthcheck = require("resty.healthcheck") 38 | local checker = healthcheck.new({ 39 | name = "testing", 40 | shm_name = "test_shm", 41 | checks = { 42 | active = { 43 | http_path = "/status", 44 | healthy = { 45 | interval = 999, -- we don't want active checks 46 | successes = 1, 47 | }, 48 | unhealthy = { 49 | interval = 999, -- we don't want active checks 50 | tcp_failures = 1, 51 | http_failures = 1, 52 | } 53 | }, 54 | passive = { 55 | healthy = { 56 | successes = 1, 57 | }, 58 | unhealthy = { 59 | tcp_failures = 1, 60 | http_failures = 1, 61 | } 62 | } 63 | } 64 | }) 65 | ngx.sleep(0.1) -- wait for initial timers to run once 66 | local ok, err = checker:add_target("127.0.0.1", 2115, nil, true) 67 | ngx.say(checker:get_target_status("127.0.0.1", 2115)) -- true 68 | checker:report_tcp_failure("127.0.0.1", 2115) 69 | ngx.say(checker:get_target_status("127.0.0.1", 2115)) -- false 70 | checker:report_success("127.0.0.1", 2115) 71 | ngx.say(checker:get_target_status("127.0.0.1", 2115)) -- true 72 | } 73 | } 74 | --- request 75 | GET /t 76 | --- response_body 77 | true 78 | false 79 | true 80 | --- no_error_log 81 | checking healthy targets: #1 82 | checking unhealthy targets: #1 83 | -------------------------------------------------------------------------------- /t/with_worker-events/07-report_tcp_failure.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * 18; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | 
lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | 21 | 22 | === TEST 1: report_tcp_failure() active + passive 23 | --- http_config eval 24 | qq{ 25 | $::HttpConfig 26 | 27 | server { 28 | listen 2120; 29 | location = /status { 30 | return 200; 31 | } 32 | } 33 | } 34 | --- config 35 | location = /t { 36 | content_by_lua_block { 37 | local we = require "resty.worker.events" 38 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 39 | local healthcheck = require("resty.healthcheck") 40 | local checker = healthcheck.new({ 41 | name = "testing", 42 | shm_name = "test_shm", 43 | type = "http", 44 | checks = { 45 | active = { 46 | http_path = "/status", 47 | healthy = { 48 | interval = 999, -- we don't want active checks 49 | successes = 3, 50 | }, 51 | unhealthy = { 52 | interval = 999, -- we don't want active checks 53 | tcp_failures = 3, 54 | http_failures = 5, 55 | } 56 | }, 57 | passive = { 58 | healthy = { 59 | successes = 3, 60 | }, 61 | unhealthy = { 62 | tcp_failures = 3, 63 | http_failures = 5, 64 | } 65 | } 66 | } 67 | }) 68 | ngx.sleep(0.1) -- wait for initial timers to run once 69 | local ok, err = checker:add_target("127.0.0.1", 2120, nil, true) 70 | local ok, err = checker:add_target("127.0.0.1", 2113, nil, true) 71 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "active") 72 | checker:report_tcp_failure("127.0.0.1", 2113, nil, nil, "passive") 73 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "active") 74 | checker:report_tcp_failure("127.0.0.1", 2113, nil, nil, "passive") 75 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "active") 76 | checker:report_tcp_failure("127.0.0.1", 2113, nil, nil, "passive") 77 | ngx.say(checker:get_target_status("127.0.0.1", 2120)) -- false 78 | ngx.say(checker:get_target_status("127.0.0.1", 2113)) -- false 79 | } 80 | } 81 | --- request 82 | GET /t 83 | --- response_body 84 | false 85 | false 86 | --- error_log 87 | unhealthy TCP increment (1/3) for '(127.0.0.1:2120)' 88 | unhealthy TCP increment (2/3) for '(127.0.0.1:2120)' 89 | unhealthy TCP increment (3/3) for '(127.0.0.1:2120)' 90 | event: target status '(127.0.0.1:2120)' from 'true' to 'false' 91 | unhealthy TCP increment (1/3) for '(127.0.0.1:2113)' 92 | unhealthy TCP increment (2/3) for '(127.0.0.1:2113)' 93 | unhealthy TCP increment (3/3) for '(127.0.0.1:2113)' 94 | event: target status '(127.0.0.1:2113)' from 'true' to 'false' 95 | 96 | 97 | === TEST 2: report_tcp_failure() for active is a nop when active.unhealthy.tcp_failures == 0 98 | --- http_config eval 99 | qq{ 100 | $::HttpConfig 101 | 102 | server { 103 | listen 2120; 104 | location = /status { 105 | return 200; 106 | } 107 | } 108 | } 109 | --- config 110 | location = /t { 111 | content_by_lua_block { 112 | local we = require "resty.worker.events" 113 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 114 | local healthcheck = require("resty.healthcheck") 115 | local checker = healthcheck.new({ 116 | name = "testing", 117 | shm_name = "test_shm", 118 | type = "http", 119 | checks = { 120 | active = { 121 | http_path = "/status", 122 | healthy = { 123 | interval = 999, -- we don't want active checks 124 | successes = 3, 125 | }, 126 | unhealthy = { 127 | interval = 999, -- we don't want active checks 128 | tcp_failures = 0, 129 | http_failures = 5, 130 | } 131 | }, 132 | passive = { 133 | healthy = { 134 | successes = 3, 135 | }, 136 | unhealthy = { 137 
| tcp_failures = 3, 138 | http_failures = 5, 139 | } 140 | } 141 | } 142 | }) 143 | ngx.sleep(0.1) -- wait for initial timers to run once 144 | local ok, err = checker:add_target("127.0.0.1", 2120, nil, true) 145 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "active") 146 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "active") 147 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "active") 148 | ngx.say(checker:get_target_status("127.0.0.1", 2120)) -- true 149 | } 150 | } 151 | --- request 152 | GET /t 153 | --- response_body 154 | true 155 | --- no_error_log 156 | unhealthy TCP increment 157 | event: target status '(127.0.0.1:2120)' from 'true' to 'false' 158 | 159 | 160 | 161 | === TEST 3: report_tcp_failure() for passive is a nop when passive.unhealthy.tcp_failures == 0 162 | --- http_config eval 163 | qq{ 164 | $::HttpConfig 165 | 166 | server { 167 | listen 2120; 168 | location = /status { 169 | return 200; 170 | } 171 | } 172 | } 173 | --- config 174 | location = /t { 175 | content_by_lua_block { 176 | local we = require "resty.worker.events" 177 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 178 | local healthcheck = require("resty.healthcheck") 179 | local checker = healthcheck.new({ 180 | name = "testing", 181 | shm_name = "test_shm", 182 | type = "http", 183 | checks = { 184 | active = { 185 | http_path = "/status", 186 | healthy = { 187 | interval = 999, -- we don't want active checks 188 | successes = 3, 189 | }, 190 | unhealthy = { 191 | interval = 999, -- we don't want active checks 192 | tcp_failures = 3, 193 | http_failures = 5, 194 | } 195 | }, 196 | passive = { 197 | healthy = { 198 | successes = 3, 199 | }, 200 | unhealthy = { 201 | tcp_failures = 0, 202 | http_failures = 5, 203 | } 204 | } 205 | } 206 | }) 207 | ngx.sleep(0.1) -- wait for initial timers to run once 208 | local ok, err = checker:add_target("127.0.0.1", 2120, nil, true) 209 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "passive") 210 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "passive") 211 | checker:report_tcp_failure("127.0.0.1", 2120, nil, nil, "passive") 212 | ngx.say(checker:get_target_status("127.0.0.1", 2120)) -- true 213 | } 214 | } 215 | --- request 216 | GET /t 217 | --- response_body 218 | true 219 | --- no_error_log 220 | unhealthy TCP increment 221 | event: target status '(127.0.0.1:2120)' from 'true' to 'false' 222 | -------------------------------------------------------------------------------- /t/with_worker-events/08-report_timeout.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * 16; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | 21 | 22 | === TEST 1: report_timeout() active + passive 23 | --- http_config eval 24 | qq{ 25 | $::HttpConfig 26 | 27 | server { 28 | listen 2122; 29 | location = /status { 30 | return 200; 31 | } 32 | } 33 | } 34 | --- config 35 | location = /t { 36 | content_by_lua_block { 37 | local we = require "resty.worker.events" 38 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 39 | local healthcheck = require("resty.healthcheck") 40 | local checker = healthcheck.new({ 41 | name = "testing", 42 | shm_name = "test_shm", 43 | type = "http", 44 | 
checks = { 45 | active = { 46 | http_path = "/status", 47 | healthy = { 48 | interval = 999, -- we don't want active checks 49 | successes = 3, 50 | }, 51 | unhealthy = { 52 | interval = 999, -- we don't want active checks 53 | tcp_failures = 3, 54 | http_failures = 5, 55 | timeouts = 2, 56 | } 57 | }, 58 | passive = { 59 | healthy = { 60 | successes = 3, 61 | }, 62 | unhealthy = { 63 | tcp_failures = 3, 64 | http_failures = 5, 65 | timeouts = 2, 66 | } 67 | } 68 | } 69 | }) 70 | ngx.sleep(0.1) -- wait for initial timers to run once 71 | local ok, err = checker:add_target("127.0.0.1", 2122, nil, true) 72 | local ok, err = checker:add_target("127.0.0.1", 2113, nil, true) 73 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 74 | checker:report_timeout("127.0.0.1", 2113, nil, "passive") 75 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 76 | checker:report_timeout("127.0.0.1", 2113, nil, "passive") 77 | ngx.say(checker:get_target_status("127.0.0.1", 2122)) -- false 78 | ngx.say(checker:get_target_status("127.0.0.1", 2113)) -- false 79 | } 80 | } 81 | --- request 82 | GET /t 83 | --- response_body 84 | false 85 | false 86 | --- error_log 87 | unhealthy TIMEOUT increment (1/2) for '(127.0.0.1:2122)' 88 | unhealthy TIMEOUT increment (2/2) for '(127.0.0.1:2122)' 89 | event: target status '(127.0.0.1:2122)' from 'true' to 'false' 90 | unhealthy TIMEOUT increment (1/2) for '(127.0.0.1:2113)' 91 | unhealthy TIMEOUT increment (2/2) for '(127.0.0.1:2113)' 92 | event: target status '(127.0.0.1:2113)' from 'true' to 'false' 93 | 94 | 95 | === TEST 2: report_timeout() for active is a nop when active.unhealthy.timeouts == 0 96 | --- http_config eval 97 | qq{ 98 | $::HttpConfig 99 | 100 | server { 101 | listen 2122; 102 | location = /status { 103 | return 200; 104 | } 105 | } 106 | } 107 | --- config 108 | location = /t { 109 | content_by_lua_block { 110 | local we = require "resty.worker.events" 111 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 112 | local healthcheck = require("resty.healthcheck") 113 | local checker = healthcheck.new({ 114 | name = "testing", 115 | shm_name = "test_shm", 116 | type = "http", 117 | checks = { 118 | active = { 119 | http_path = "/status", 120 | healthy = { 121 | interval = 999, -- we don't want active checks 122 | successes = 3, 123 | }, 124 | unhealthy = { 125 | interval = 999, -- we don't want active checks 126 | tcp_failures = 3, 127 | http_failures = 5, 128 | timeouts = 0, 129 | } 130 | }, 131 | passive = { 132 | healthy = { 133 | successes = 3, 134 | }, 135 | unhealthy = { 136 | tcp_failures = 3, 137 | http_failures = 5, 138 | timeouts = 2, 139 | } 140 | } 141 | } 142 | }) 143 | ngx.sleep(0.1) -- wait for initial timers to run once 144 | local ok, err = checker:add_target("127.0.0.1", 2122, nil, true) 145 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 146 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 147 | checker:report_timeout("127.0.0.1", 2122, nil, "active") 148 | ngx.say(checker:get_target_status("127.0.0.1", 2122)) -- true 149 | } 150 | } 151 | --- request 152 | GET /t 153 | --- response_body 154 | true 155 | --- no_error_log 156 | unhealthy TCP increment 157 | event: target status '(127.0.0.1:2122)' from 'true' to 'false' 158 | 159 | 160 | 161 | === TEST 3: report_timeout() for passive is a nop when passive.unhealthy.timeouts == 0 162 | --- http_config eval 163 | qq{ 164 | $::HttpConfig 165 | 166 | server { 167 | listen 2122; 168 | location = /status { 169 | return 200; 170 | } 171 | } 172 | } 
173 | --- config 174 | location = /t { 175 | content_by_lua_block { 176 | local we = require "resty.worker.events" 177 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 178 | local healthcheck = require("resty.healthcheck") 179 | local checker = healthcheck.new({ 180 | name = "testing", 181 | shm_name = "test_shm", 182 | type = "http", 183 | checks = { 184 | active = { 185 | http_path = "/status", 186 | healthy = { 187 | interval = 999, -- we don't want active checks 188 | successes = 3, 189 | }, 190 | unhealthy = { 191 | interval = 999, -- we don't want active checks 192 | tcp_failures = 3, 193 | http_failures = 5, 194 | timeouts = 2, 195 | } 196 | }, 197 | passive = { 198 | healthy = { 199 | successes = 3, 200 | }, 201 | unhealthy = { 202 | tcp_failures = 3, 203 | http_failures = 5, 204 | timeouts = 0, 205 | } 206 | } 207 | } 208 | }) 209 | ngx.sleep(0.1) -- wait for initial timers to run once 210 | local ok, err = checker:add_target("127.0.0.1", 2122, nil, true) 211 | checker:report_timeout("127.0.0.1", 2122, nil, "passive") 212 | checker:report_timeout("127.0.0.1", 2122, nil, "passive") 213 | checker:report_timeout("127.0.0.1", 2122, nil, "passive") 214 | ngx.say(checker:get_target_status("127.0.0.1", 2122)) -- true 215 | } 216 | } 217 | --- request 218 | GET /t 219 | --- response_body 220 | true 221 | --- no_error_log 222 | unhealthy TCP increment 223 | event: target status '(127.0.0.1:2122)' from 'true' to 'false' 224 | -------------------------------------------------------------------------------- /t/with_worker-events/10-garbagecollect.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * (blocks() * 3); 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | 21 | 22 | === TEST 1: garbage collect the checker object 23 | --- http_config eval 24 | qq{ 25 | $::HttpConfig 26 | 27 | server { 28 | listen 2121; 29 | location = /status { 30 | return 200; 31 | } 32 | } 33 | } 34 | --- config 35 | location = /t { 36 | content_by_lua_block { 37 | ngx.shared.my_worker_events:flush_all() 38 | local dump = function(...) 
ngx.log(ngx.DEBUG,"\027[31m\n", require("pl.pretty").write({...}),"\027[0m") end 39 | local we = require "resty.worker.events" 40 | assert(we.configure { 41 | shm = "my_worker_events", 42 | interval = 0.1, 43 | debug = true, 44 | }) 45 | local healthcheck = require("resty.healthcheck") 46 | local checker = healthcheck.new({ 47 | name = "testing", 48 | shm_name = "test_shm", 49 | type = "http", 50 | checks = { 51 | active = { 52 | http_path = "/status", 53 | healthy = { 54 | interval = 0.1, 55 | successes = 3, 56 | }, 57 | unhealthy = { 58 | interval = 0.1, 59 | http_failures = 3, 60 | } 61 | }, 62 | } 63 | }) 64 | assert(checker:add_target("127.0.0.1", 2121, nil, true)) 65 | local weak_table = setmetatable({ checker },{ 66 | __mode = "v", 67 | }) 68 | checker = nil -- now only anchored in weak table above 69 | collectgarbage() 70 | collectgarbage() 71 | collectgarbage() 72 | collectgarbage() 73 | ngx.sleep(0.5) -- leave room for timers to run (they shouldn't, but we want to be sure) 74 | ngx.say(#weak_table) -- after GC, should be 0 length 75 | } 76 | } 77 | --- request 78 | GET /t 79 | --- response_body 80 | 0 81 | --- no_error_log 82 | checking healthy targets: #1 83 | -------------------------------------------------------------------------------- /t/with_worker-events/12-set_target_status.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * blocks() * 2; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | === TEST 1: set_target_status() updates a status 21 | --- http_config eval 22 | qq{ 23 | $::HttpConfig 24 | } 25 | --- config 26 | location = /t { 27 | content_by_lua_block { 28 | local we = require "resty.worker.events" 29 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 30 | local healthcheck = require("resty.healthcheck") 31 | local checker = healthcheck.new({ 32 | name = "testing", 33 | shm_name = "test_shm", 34 | }) 35 | ngx.sleep(0.1) -- wait for initial timers to run once 36 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 37 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 38 | checker:set_target_status("127.0.0.1", 2112, nil, false) 39 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 40 | checker:set_target_status("127.0.0.1", 2112, nil, true) 41 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 42 | } 43 | } 44 | --- request 45 | GET /t 46 | --- response_body 47 | true 48 | false 49 | true 50 | === TEST 2: set_target_status() restores node after passive check disables it 51 | --- http_config eval 52 | qq{ 53 | $::HttpConfig 54 | } 55 | --- config 56 | location = /t { 57 | content_by_lua_block { 58 | local we = require "resty.worker.events" 59 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 60 | local healthcheck = require("resty.healthcheck") 61 | local checker = healthcheck.new({ 62 | name = "testing", 63 | shm_name = "test_shm", 64 | checks = { 65 | passive = { 66 | unhealthy = { 67 | tcp_failures = 2, 68 | http_failures = 2, 69 | } 70 | } 71 | } 72 | }) 73 | ngx.sleep(0.1) -- wait for initial timers to run once 74 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 75 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 76 | 
checker:report_http_status("127.0.0.1", 2112, nil, 500) 77 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 78 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 79 | checker:set_target_status("127.0.0.1", 2112, nil, true) 80 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 81 | } 82 | } 83 | --- request 84 | GET /t 85 | --- response_body 86 | true 87 | false 88 | true 89 | === TEST 3: set_target_status() resets the failure counters 90 | --- http_config eval 91 | qq{ 92 | $::HttpConfig 93 | } 94 | --- config 95 | location = /t { 96 | content_by_lua_block { 97 | local we = require "resty.worker.events" 98 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 99 | local healthcheck = require("resty.healthcheck") 100 | local checker = healthcheck.new({ 101 | name = "testing", 102 | shm_name = "test_shm", 103 | checks = { 104 | passive = { 105 | healthy = { 106 | successes = 2, 107 | }, 108 | unhealthy = { 109 | tcp_failures = 2, 110 | http_failures = 2, 111 | } 112 | } 113 | } 114 | }) 115 | ngx.sleep(0.1) -- wait for initial timers to run once 116 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 117 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 118 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 119 | checker:set_target_status("127.0.0.1", 2112, nil, true) 120 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 121 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 122 | checker:report_http_status("127.0.0.1", 2112, nil, 500) 123 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 124 | } 125 | } 126 | --- request 127 | GET /t 128 | --- response_body 129 | true 130 | true 131 | false 132 | === TEST 3: set_target_status() resets the success counters 133 | --- http_config eval 134 | qq{ 135 | $::HttpConfig 136 | } 137 | --- config 138 | location = /t { 139 | content_by_lua_block { 140 | local we = require "resty.worker.events" 141 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 142 | local healthcheck = require("resty.healthcheck") 143 | local checker = healthcheck.new({ 144 | name = "testing", 145 | shm_name = "test_shm", 146 | checks = { 147 | passive = { 148 | healthy = { 149 | successes = 2, 150 | }, 151 | unhealthy = { 152 | tcp_failures = 2, 153 | http_failures = 2, 154 | } 155 | } 156 | } 157 | }) 158 | ngx.sleep(0.1) -- wait for initial timers to run once 159 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 160 | checker:set_target_status("127.0.0.1", 2112, nil, false) 161 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 162 | checker:report_http_status("127.0.0.1", 2112, nil, 200) 163 | checker:set_target_status("127.0.0.1", 2112, nil, false) 164 | checker:report_http_status("127.0.0.1", 2112, nil, 200) 165 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- false 166 | checker:report_http_status("127.0.0.1", 2112, nil, 200) 167 | ngx.say(checker:get_target_status("127.0.0.1", 2112)) -- true 168 | } 169 | } 170 | --- request 171 | GET /t 172 | --- response_body 173 | false 174 | false 175 | true 176 | -------------------------------------------------------------------------------- /t/with_worker-events/13-integration.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * 2; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path 
"$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | 21 | 22 | === TEST 1: ensure counters work properly 23 | --- http_config eval 24 | qq{ 25 | $::HttpConfig 26 | } 27 | --- config eval 28 | qq{ 29 | location = /t { 30 | content_by_lua_block { 31 | local host = "127.0.0.1" 32 | local port = 2112 33 | 34 | local we = require "resty.worker.events" 35 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 36 | local healthcheck = require("resty.healthcheck") 37 | local checker = healthcheck.new({ 38 | test = true, 39 | name = "testing", 40 | shm_name = "test_shm", 41 | type = "http", 42 | checks = { 43 | active = { 44 | http_path = "/status", 45 | healthy = { 46 | interval = 0, 47 | successes = 4, 48 | }, 49 | unhealthy = { 50 | interval = 0, 51 | tcp_failures = 2, 52 | http_failures = 0, 53 | } 54 | }, 55 | passive = { 56 | healthy = { 57 | successes = 2, 58 | }, 59 | unhealthy = { 60 | tcp_failures = 2, 61 | http_failures = 2, 62 | timeouts = 2, 63 | } 64 | } 65 | } 66 | }) 67 | 68 | local ok, err = checker:add_target(host, port, nil, true) 69 | 70 | -- S = successes counter 71 | -- F = http_failures counter 72 | -- T = tcp_failures counter 73 | -- O = timeouts counter 74 | 75 | local cases = {} 76 | 77 | local function incr(idxs, i, max) 78 | idxs[i] = idxs[i] + 1 79 | if idxs[i] > max and i > 1 then 80 | idxs[i] = 1 81 | incr(idxs, i - 1, max) 82 | end 83 | end 84 | 85 | local function add_cases(cases, len, m) 86 | local idxs = {} 87 | for i = 1, len do 88 | idxs[i] = 1 89 | end 90 | local word = {} 91 | for _ = 1, (#m) ^ len do 92 | for c = 1, len do 93 | word[c] = m[idxs[c]] 94 | end 95 | table.insert(cases, table.concat(word)) 96 | incr(idxs, len, #m) 97 | end 98 | end 99 | 100 | local m = { "S", "F", "T", "O" } 101 | 102 | -- There are 324 (3*3*3*3*4) possible internal states 103 | -- to the above healthcheck configuration where all limits are set to 2. 104 | -- We need at least five events (4*4*4*4) to be able 105 | -- to exercise all of them 106 | for i = 1, 5 do 107 | add_cases(cases, i, m) 108 | end 109 | 110 | -- Brute-force test all combinations of health events up to 5 events 111 | -- and compares the results given by the library with a simple simulation 112 | -- that implements the specified behavior. 
113 | local function run_test_case(case) 114 | assert(checker:set_target_status(host, port, nil, true)) 115 | local i = 1 116 | local s, f, t, o = 0, 0, 0, 0 117 | local mode = true 118 | for c in case:gmatch(".") do 119 | if c == "S" then 120 | checker:report_http_status(host, port, nil, 200, "passive") 121 | s = s + 1 122 | f, t, o = 0, 0, 0 123 | if s == 2 then 124 | mode = true 125 | end 126 | elseif c == "F" then 127 | checker:report_http_status(host, port, nil, 500, "passive") 128 | f = f + 1 129 | s = 0 130 | if f == 2 then 131 | mode = false 132 | end 133 | elseif c == "T" then 134 | checker:report_tcp_failure(host, port, nil, "read", "passive") 135 | t = t + 1 136 | s = 0 137 | if t == 2 then 138 | mode = false 139 | end 140 | elseif c == "O" then 141 | checker:report_timeout(host, port, nil, "passive") 142 | o = o + 1 143 | s = 0 144 | if o == 2 then 145 | mode = false 146 | end 147 | end 148 | 149 | --local ctr, state = checker:test_get_counter(host, port, nil) 150 | --ngx.say(case, ": ", c, " ", string.format("%08x", ctr), " ", state) 151 | --ngx.log(ngx.DEBUG, case, ": ", c, " ", string.format("%08x", ctr), " ", state) 152 | 153 | if checker:get_target_status(host, port, nil) ~= mode then 154 | ngx.say("failed: ", case, " step ", i, " expected ", mode) 155 | return false 156 | end 157 | i = i + 1 158 | end 159 | return true 160 | end 161 | 162 | for _, case in ipairs(cases) do 163 | ngx.log(ngx.ERR, "Case: ", case) 164 | run_test_case(case) 165 | end 166 | ngx.say("all ok!") 167 | } 168 | } 169 | } 170 | --- request 171 | GET /t 172 | --- response_body 173 | all ok! 174 | --- error_log 175 | --- no_error_log 176 | -------------------------------------------------------------------------------- /t/with_worker-events/14-tls_active_probes.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => blocks() * 2; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | 21 | 22 | === TEST 1: active probes, valid https 23 | --- http_config eval: $::HttpConfig 24 | --- config 25 | location = /t { 26 | lua_ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt; 27 | lua_ssl_verify_depth 2; 28 | content_by_lua_block { 29 | local we = require "resty.worker.events" 30 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 31 | local healthcheck = require("resty.healthcheck") 32 | local checker = healthcheck.new({ 33 | name = "testing", 34 | shm_name = "test_shm", 35 | checks = { 36 | active = { 37 | timeout = 2, 38 | type = "https", 39 | http_path = "/", 40 | healthy = { 41 | interval = 2, 42 | successes = 2, 43 | }, 44 | unhealthy = { 45 | interval = 2, 46 | tcp_failures = 2, 47 | } 48 | }, 49 | } 50 | }) 51 | local ok, err = checker:add_target("104.154.89.105", 443, "badssl.com", false) 52 | ngx.sleep(16) -- wait for 4x the check interval 53 | ngx.say(checker:get_target_status("104.154.89.105", 443, "badssl.com")) -- true 54 | } 55 | } 56 | --- request 57 | GET /t 58 | --- response_body 59 | true 60 | --- timeout 61 | 20 62 | 63 | === TEST 2: active probes, invalid cert 64 | --- http_config eval: $::HttpConfig 65 | --- config 66 | location = /t { 67 | lua_ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt; 68 | lua_ssl_verify_depth 2; 69 | content_by_lua_block { 70 | 
local we = require "resty.worker.events" 71 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 72 | local healthcheck = require("resty.healthcheck") 73 | local checker = healthcheck.new({ 74 | name = "testing", 75 | shm_name = "test_shm", 76 | checks = { 77 | active = { 78 | timeout = 2, 79 | type = "https", 80 | http_path = "/", 81 | healthy = { 82 | interval = 2, 83 | successes = 2, 84 | }, 85 | unhealthy = { 86 | interval = 2, 87 | tcp_failures = 2, 88 | } 89 | }, 90 | } 91 | }) 92 | local ok, err = checker:add_target("104.154.89.105", 443, "wrong.host.badssl.com", true) 93 | ngx.sleep(16) -- wait for 4x the check interval 94 | ngx.say(checker:get_target_status("104.154.89.105", 443, "wrong.host.badssl.com")) -- false 95 | } 96 | } 97 | --- request 98 | GET /t 99 | --- response_body 100 | false 101 | --- timeout 102 | 20 103 | 104 | === TEST 3: active probes, accept invalid cert when disabling check 105 | --- http_config eval: $::HttpConfig 106 | --- config 107 | location = /t { 108 | lua_ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt; 109 | lua_ssl_verify_depth 2; 110 | content_by_lua_block { 111 | local we = require "resty.worker.events" 112 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 113 | local healthcheck = require("resty.healthcheck") 114 | local checker = healthcheck.new({ 115 | name = "testing", 116 | shm_name = "test_shm", 117 | checks = { 118 | active = { 119 | timeout = 2, 120 | type = "https", 121 | https_verify_certificate = false, 122 | http_path = "/", 123 | healthy = { 124 | interval = 2, 125 | successes = 2, 126 | }, 127 | unhealthy = { 128 | interval = 2, 129 | tcp_failures = 2, 130 | } 131 | }, 132 | } 133 | }) 134 | local ok, err = checker:add_target("104.154.89.105", 443, "wrong.host.badssl.com", false) 135 | ngx.sleep(16) -- wait for 4x the check interval 136 | ngx.say(checker:get_target_status("104.154.89.105", 443, "wrong.host.badssl.com")) -- true 137 | } 138 | } 139 | --- request 140 | GET /t 141 | --- response_body 142 | true 143 | --- timeout 144 | 20 145 | -------------------------------------------------------------------------------- /t/with_worker-events/16-set_all_target_statuses_for_hostname.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * blocks() * 2; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | === TEST 1: set_all_target_statuses_for_hostname() updates statuses 21 | --- http_config eval 22 | qq{ 23 | $::HttpConfig 24 | } 25 | --- config 26 | location = /t { 27 | content_by_lua_block { 28 | local we = require "resty.worker.events" 29 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 30 | local healthcheck = require("resty.healthcheck") 31 | local checker = healthcheck.new({ 32 | name = "testing", 33 | shm_name = "test_shm", 34 | }) 35 | ngx.sleep(0.1) -- wait for initial timers to run once 36 | checker:add_target("127.0.0.1", 2112, "rush", true) 37 | checker:add_target("127.0.0.2", 2112, "rush", true) 38 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- true 39 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- true 40 | checker:set_all_target_statuses_for_hostname("rush", 2112, false) 41 | 
ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- false 42 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- false 43 | checker:set_all_target_statuses_for_hostname("rush", 2112, true) 44 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- true 45 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- true 46 | } 47 | } 48 | --- request 49 | GET /t 50 | --- response_body 51 | true 52 | true 53 | false 54 | false 55 | true 56 | true 57 | === TEST 2: set_all_target_statuses_for_hostname() restores node after passive check disables it 58 | --- http_config eval 59 | qq{ 60 | $::HttpConfig 61 | } 62 | --- config 63 | location = /t { 64 | content_by_lua_block { 65 | local we = require "resty.worker.events" 66 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 67 | local healthcheck = require("resty.healthcheck") 68 | local checker = healthcheck.new({ 69 | name = "testing", 70 | shm_name = "test_shm", 71 | checks = { 72 | passive = { 73 | unhealthy = { 74 | tcp_failures = 2, 75 | http_failures = 2, 76 | } 77 | } 78 | } 79 | }) 80 | ngx.sleep(0.1) -- wait for initial timers to run once 81 | checker:add_target("127.0.0.1", 2112, "rush", true) 82 | checker:add_target("127.0.0.2", 2112, "rush", true) 83 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- true 84 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- true 85 | checker:report_http_status("127.0.0.1", 2112, "rush", 500) 86 | checker:report_http_status("127.0.0.1", 2112, "rush", 500) 87 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- false 88 | checker:set_all_target_statuses_for_hostname("rush", 2112, true) 89 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- true 90 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- true 91 | } 92 | } 93 | --- request 94 | GET /t 95 | --- response_body 96 | true 97 | true 98 | false 99 | true 100 | true 101 | === TEST 3: set_all_target_statuses_for_hostname() resets failure counters 102 | --- http_config eval 103 | qq{ 104 | $::HttpConfig 105 | } 106 | --- config 107 | location = /t { 108 | content_by_lua_block { 109 | local we = require "resty.worker.events" 110 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 111 | local healthcheck = require("resty.healthcheck") 112 | local checker = healthcheck.new({ 113 | name = "testing", 114 | shm_name = "test_shm", 115 | checks = { 116 | passive = { 117 | healthy = { 118 | successes = 2, 119 | }, 120 | unhealthy = { 121 | tcp_failures = 2, 122 | http_failures = 2, 123 | } 124 | } 125 | } 126 | }) 127 | ngx.sleep(0.1) -- wait for initial timers to run once 128 | checker:add_target("127.0.0.1", 2112, "rush", true) 129 | checker:add_target("127.0.0.2", 2112, "rush", true) 130 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- true 131 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- true 132 | checker:report_http_status("127.0.0.1", 2112, "rush", 500) 133 | checker:set_all_target_statuses_for_hostname("rush", 2112, true) 134 | checker:report_http_status("127.0.0.1", 2112, "rush", 500) 135 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- true 136 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- true 137 | checker:report_http_status("127.0.0.1", 2112, "rush", 500) 138 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- false 139 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- true 140 | } 141 | 
} 142 | --- request 143 | GET /t 144 | --- response_body 145 | true 146 | true 147 | true 148 | true 149 | false 150 | true 151 | === TEST 4: set_target_status() resets the success counters 152 | --- http_config eval 153 | qq{ 154 | $::HttpConfig 155 | } 156 | --- config 157 | location = /t { 158 | content_by_lua_block { 159 | local we = require "resty.worker.events" 160 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 161 | local healthcheck = require("resty.healthcheck") 162 | local checker = healthcheck.new({ 163 | name = "testing", 164 | shm_name = "test_shm", 165 | checks = { 166 | passive = { 167 | healthy = { 168 | successes = 2, 169 | }, 170 | unhealthy = { 171 | tcp_failures = 2, 172 | http_failures = 2, 173 | } 174 | } 175 | } 176 | }) 177 | ngx.sleep(0.1) -- wait for initial timers to run once 178 | checker:add_target("127.0.0.1", 2112, "rush", true) 179 | checker:add_target("127.0.0.2", 2112, "rush", true) 180 | checker:set_all_target_statuses_for_hostname("rush", 2112, false) 181 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- false 182 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- false 183 | checker:report_http_status("127.0.0.1", 2112, "rush", 200) 184 | checker:set_all_target_statuses_for_hostname("rush", 2112, false) 185 | checker:report_http_status("127.0.0.1", 2112, "rush", 200) 186 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- false 187 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- false 188 | checker:report_http_status("127.0.0.1", 2112, "rush", 200) 189 | ngx.say(checker:get_target_status("127.0.0.1", 2112, "rush")) -- true 190 | ngx.say(checker:get_target_status("127.0.0.2", 2112, "rush")) -- false 191 | } 192 | } 193 | --- request 194 | GET /t 195 | --- response_body 196 | false 197 | false 198 | false 199 | false 200 | true 201 | false 202 | -------------------------------------------------------------------------------- /t/with_worker-events/17-mtls.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * 4; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | === TEST 1: configure a MTLS probe 21 | --- http_config eval 22 | qq{ 23 | $::HttpConfig 24 | } 25 | --- config 26 | location = /t { 27 | content_by_lua_block { 28 | local we = require "resty.worker.events" 29 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 30 | 31 | local pl_file = require "pl.file" 32 | local cert = pl_file.read("t/with_worker-events/util/cert.pem", true) 33 | local key = pl_file.read("t/with_worker-events/util/key.pem", true) 34 | 35 | local healthcheck = require("resty.healthcheck") 36 | local checker = healthcheck.new({ 37 | name = "testing_mtls", 38 | shm_name = "test_shm", 39 | type = "http", 40 | ssl_cert = cert, 41 | ssl_key = key, 42 | checks = { 43 | active = { 44 | http_path = "/status", 45 | healthy = { 46 | interval = 999, -- we don't want active checks 47 | successes = 3, 48 | }, 49 | unhealthy = { 50 | interval = 999, -- we don't want active checks 51 | tcp_failures = 3, 52 | http_failures = 3, 53 | } 54 | }, 55 | passive = { 56 | healthy = { 57 | successes = 3, 58 | }, 59 | unhealthy = { 60 | tcp_failures = 3, 61 | http_failures = 3, 62 | } 
63 | } 64 | } 65 | }) 66 | ngx.say(checker ~= nil) -- true 67 | } 68 | } 69 | --- request 70 | GET /t 71 | --- response_body 72 | true 73 | 74 | 75 | === TEST 2: configure a MTLS probe with parsed cert/key 76 | --- http_config eval 77 | qq{ 78 | $::HttpConfig 79 | } 80 | --- config 81 | location = /t { 82 | content_by_lua_block { 83 | local we = require "resty.worker.events" 84 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 85 | 86 | local pl_file = require "pl.file" 87 | local ssl = require "ngx.ssl" 88 | local cert = ssl.parse_pem_cert(pl_file.read("t/with_worker-events/util/cert.pem", true)) 89 | local key = ssl.parse_pem_priv_key(pl_file.read("t/with_worker-events/util/key.pem", true)) 90 | 91 | local healthcheck = require("resty.healthcheck") 92 | local checker = healthcheck.new({ 93 | name = "testing_mtls", 94 | shm_name = "test_shm", 95 | type = "http", 96 | ssl_cert = cert, 97 | ssl_key = key, 98 | checks = { 99 | active = { 100 | http_path = "/status", 101 | healthy = { 102 | interval = 999, -- we don't want active checks 103 | successes = 3, 104 | }, 105 | unhealthy = { 106 | interval = 999, -- we don't want active checks 107 | tcp_failures = 3, 108 | http_failures = 3, 109 | } 110 | }, 111 | passive = { 112 | healthy = { 113 | successes = 3, 114 | }, 115 | unhealthy = { 116 | tcp_failures = 3, 117 | http_failures = 3, 118 | } 119 | } 120 | } 121 | }) 122 | ngx.say(checker ~= nil) -- true 123 | } 124 | } 125 | --- request 126 | GET /t 127 | --- response_body 128 | true 129 | -------------------------------------------------------------------------------- /t/with_worker-events/18-req-headers.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua 'no_plan'; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | my $pwd = cwd(); 7 | 8 | our $HttpConfig = qq{ 9 | lua_package_path "$pwd/lib/?.lua;;"; 10 | lua_shared_dict test_shm 8m; 11 | lua_shared_dict my_worker_events 8m; 12 | }; 13 | 14 | run_tests(); 15 | 16 | __DATA__ 17 | 18 | === TEST 1: headers: {"User-Agent: curl/7.29.0"} 19 | --- http_config eval 20 | qq{ 21 | $::HttpConfig 22 | 23 | server { 24 | listen 2112; 25 | location = /status { 26 | return 200; 27 | } 28 | } 29 | } 30 | --- config 31 | location = /t { 32 | content_by_lua_block { 33 | local we = require "resty.worker.events" 34 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 35 | local healthcheck = require("resty.healthcheck") 36 | local checker = healthcheck.new({ 37 | name = "testing", 38 | shm_name = "test_shm", 39 | checks = { 40 | active = { 41 | http_path = "/status", 42 | healthy = { 43 | interval = 0.1 44 | }, 45 | headers = {"User-Agent: curl/7.29.0"} 46 | } 47 | } 48 | }) 49 | ngx.sleep(0.2) -- wait twice the interval 50 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 51 | ngx.say(ok) 52 | ngx.sleep(0.2) -- wait twice the interval 53 | } 54 | } 55 | --- request 56 | GET /t 57 | --- response_body 58 | true 59 | --- error_log 60 | checking healthy targets: nothing to do 61 | checking healthy targets: #1 62 | GET /status HTTP/1.0 63 | User-Agent: curl/7.29.0 64 | Host: 127.0.0.1 65 | 66 | 67 | 68 | === TEST 2: headers: {"User-Agent: curl"} 69 | --- http_config eval 70 | qq{ 71 | $::HttpConfig 72 | 73 | server { 74 | listen 2112; 75 | location = /status { 76 | return 200; 77 | } 78 | } 79 | } 80 | --- config 81 | location = /t { 82 | content_by_lua_block { 83 | local we = require "resty.worker.events" 84 | assert(we.configure{ shm = 
"my_worker_events", interval = 0.1 }) 85 | local healthcheck = require("resty.healthcheck") 86 | local checker = healthcheck.new({ 87 | name = "testing", 88 | shm_name = "test_shm", 89 | checks = { 90 | active = { 91 | http_path = "/status", 92 | healthy = { 93 | interval = 0.1 94 | }, 95 | headers = {"User-Agent: curl"} 96 | } 97 | } 98 | }) 99 | ngx.sleep(0.2) -- wait twice the interval 100 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 101 | ngx.say(ok) 102 | ngx.sleep(0.2) -- wait twice the interval 103 | } 104 | } 105 | --- request 106 | GET /t 107 | --- response_body 108 | true 109 | --- error_log 110 | checking healthy targets: nothing to do 111 | checking healthy targets: #1 112 | GET /status HTTP/1.0 113 | User-Agent: curl 114 | Host: 127.0.0.1 115 | 116 | 117 | === TEST 3: headers: { ["User-Agent"] = "curl" } 118 | --- http_config eval 119 | qq{ 120 | $::HttpConfig 121 | 122 | server { 123 | listen 2112; 124 | location = /status { 125 | return 200; 126 | } 127 | } 128 | } 129 | --- config 130 | location = /t { 131 | content_by_lua_block { 132 | local we = require "resty.worker.events" 133 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 134 | local healthcheck = require("resty.healthcheck") 135 | local checker = healthcheck.new({ 136 | name = "testing", 137 | shm_name = "test_shm", 138 | checks = { 139 | active = { 140 | http_path = "/status", 141 | healthy = { 142 | interval = 0.1 143 | }, 144 | headers = { ["User-Agent"] = "curl" } 145 | } 146 | } 147 | }) 148 | ngx.sleep(0.2) -- wait twice the interval 149 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 150 | ngx.say(ok) 151 | ngx.sleep(0.2) -- wait twice the interval 152 | } 153 | } 154 | --- request 155 | GET /t 156 | --- response_body 157 | true 158 | --- error_log 159 | checking healthy targets: nothing to do 160 | checking healthy targets: #1 161 | GET /status HTTP/1.0 162 | User-Agent: curl 163 | Host: 127.0.0.1 164 | 165 | 166 | 167 | === TEST 4: headers: { ["User-Agent"] = {"curl"} } 168 | --- http_config eval 169 | qq{ 170 | $::HttpConfig 171 | 172 | server { 173 | listen 2112; 174 | location = /status { 175 | return 200; 176 | } 177 | } 178 | } 179 | --- config 180 | location = /t { 181 | content_by_lua_block { 182 | local we = require "resty.worker.events" 183 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 184 | local healthcheck = require("resty.healthcheck") 185 | local checker = healthcheck.new({ 186 | name = "testing", 187 | shm_name = "test_shm", 188 | checks = { 189 | active = { 190 | http_path = "/status", 191 | healthy = { 192 | interval = 0.1 193 | }, 194 | headers = { ["User-Agent"] = {"curl"} } 195 | } 196 | } 197 | }) 198 | ngx.sleep(0.2) -- wait twice the interval 199 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 200 | ngx.say(ok) 201 | ngx.sleep(0.2) -- wait twice the interval 202 | } 203 | } 204 | --- request 205 | GET /t 206 | --- response_body 207 | true 208 | --- error_log 209 | checking healthy targets: nothing to do 210 | checking healthy targets: #1 211 | GET /status HTTP/1.0 212 | User-Agent: curl 213 | Host: 127.0.0.1 214 | 215 | 216 | 217 | === TEST 5: headers: { ["User-Agent"] = {"curl", "nginx"} } 218 | --- http_config eval 219 | qq{ 220 | $::HttpConfig 221 | 222 | server { 223 | listen 2112; 224 | location = /status { 225 | return 200; 226 | } 227 | } 228 | } 229 | --- config 230 | location = /t { 231 | content_by_lua_block { 232 | local we = require "resty.worker.events" 233 | 
assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 234 | local healthcheck = require("resty.healthcheck") 235 | local checker = healthcheck.new({ 236 | name = "testing", 237 | shm_name = "test_shm", 238 | checks = { 239 | active = { 240 | http_path = "/status", 241 | healthy = { 242 | interval = 0.1 243 | }, 244 | headers = { ["User-Agent"] = {"curl", "nginx"} } 245 | } 246 | } 247 | }) 248 | ngx.sleep(0.2) -- wait twice the interval 249 | local ok, err = checker:add_target("127.0.0.1", 2112, nil, true) 250 | ngx.say(ok) 251 | ngx.sleep(0.2) -- wait twice the interval 252 | } 253 | } 254 | --- request 255 | GET /t 256 | --- response_body 257 | true 258 | --- error_log 259 | checking healthy targets: nothing to do 260 | checking healthy targets: #1 261 | GET /status HTTP/1.0 262 | User-Agent: curl 263 | User-Agent: nginx 264 | Host: 127.0.0.1 265 | -------------------------------------------------------------------------------- /t/with_worker-events/19-timer.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * blocks() * 2; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | 21 | 22 | === TEST 1: active probes, http node failing 23 | --- http_config eval 24 | qq{ 25 | $::HttpConfig 26 | 27 | server { 28 | listen 2130; 29 | location = /status { 30 | content_by_lua_block { 31 | ngx.sleep(2) 32 | ngx.exit(500); 33 | } 34 | } 35 | } 36 | } 37 | --- config 38 | location = /t { 39 | content_by_lua_block { 40 | local we = require "resty.worker.events" 41 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 42 | local healthcheck = require("resty.healthcheck") 43 | local checker = healthcheck.new({ 44 | name = "testing", 45 | shm_name = "test_shm", 46 | type = "http", 47 | checks = { 48 | active = { 49 | timeout = 1, 50 | http_path = "/status", 51 | healthy = { 52 | interval = 0.1, 53 | successes = 3, 54 | }, 55 | unhealthy = { 56 | interval = 0.1, 57 | http_failures = 3, 58 | } 59 | }, 60 | } 61 | }) 62 | local ok, err = checker:add_target("127.0.0.1", 2130, nil, true) 63 | ngx.sleep(3) -- wait for some time to let the checks run 64 | -- There should be no more than 3 timers running atm, but 65 | -- add a few spaces for worker events 66 | ngx.say(tonumber(ngx.timer.running_count()) <= 5) 67 | } 68 | } 69 | --- request 70 | GET /t 71 | --- response_body 72 | true 73 | -------------------------------------------------------------------------------- /t/with_worker-events/20-event_handler.t: -------------------------------------------------------------------------------- 1 | use Test::Nginx::Socket::Lua; 2 | use Cwd qw(cwd); 3 | 4 | workers(1); 5 | 6 | plan tests => repeat_each() * 4; 7 | 8 | my $pwd = cwd(); 9 | 10 | our $HttpConfig = qq{ 11 | lua_package_path "$pwd/lib/?.lua;;"; 12 | lua_shared_dict test_shm 8m; 13 | lua_shared_dict my_worker_events 8m; 14 | }; 15 | 16 | run_tests(); 17 | 18 | __DATA__ 19 | 20 | === TEST 1: add_target() without hostname, remove_target() with same ip:port 21 | --- http_config eval 22 | qq{ 23 | $::HttpConfig 24 | 25 | server { 26 | listen 2112; 27 | location = /status { 28 | return 200; 29 | } 30 | } 31 | } 32 | --- config 33 | location = /t { 34 | content_by_lua_block { 35 | local we = require "resty.worker.events" 36 
| assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 37 | local healthcheck = require("resty.healthcheck") 38 | local checker = healthcheck.new({ 39 | name = "testing", 40 | shm_name = "test_shm", 41 | checks = { 42 | active = { 43 | http_path = "/status", 44 | healthy = { 45 | interval = 0.1 46 | }, 47 | unhealthy = { 48 | interval = 0.1 49 | } 50 | } 51 | } 52 | }) 53 | ngx.sleep(0.2) -- wait twice the interval 54 | local ok, err = checker:add_target("127.0.0.1", 2112) 55 | ngx.say(ok) 56 | ngx.sleep(0.2) -- wait twice the interval 57 | ok, err = checker:remove_target("127.0.0.1", 2112) 58 | ngx.sleep(0.2) -- wait twice the interval 59 | } 60 | } 61 | --- request 62 | GET /t 63 | --- response_body 64 | true 65 | 66 | === TEST 2: add_target() with hostname, remove_target() on same target 67 | --- http_config eval 68 | qq{ 69 | $::HttpConfig 70 | 71 | server { 72 | listen 2112; 73 | location = /status { 74 | return 200; 75 | } 76 | } 77 | } 78 | --- config 79 | location = /t { 80 | content_by_lua_block { 81 | local we = require "resty.worker.events" 82 | assert(we.configure{ shm = "my_worker_events", interval = 0.1 }) 83 | local healthcheck = require("resty.healthcheck") 84 | local checker = healthcheck.new({ 85 | name = "testing", 86 | shm_name = "test_shm", 87 | checks = { 88 | active = { 89 | http_path = "/status", 90 | healthy = { 91 | interval = 0.1 92 | }, 93 | unhealthy = { 94 | interval = 0.1 95 | } 96 | } 97 | } 98 | }) 99 | ngx.sleep(0.2) -- wait twice the interval 100 | local ok, err = checker:add_target("127.0.0.1", 2112, "localhost") 101 | ngx.say(ok) 102 | ngx.sleep(0.2) -- wait twice the interval 103 | ok, err = checker:remove_target("127.0.0.1", 2112, "localhost") 104 | ngx.sleep(0.2) -- wait twice the interval 105 | } 106 | } 107 | --- request 108 | GET /t 109 | --- response_body 110 | true 111 | 112 | -------------------------------------------------------------------------------- /t/with_worker-events/util/cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDCTCCAfGgAwIBAgIUWWntedJ1yLAJE2baK/Mg06osmGAwDQYJKoZIhvcNAQEL 3 | BQAwFDESMBAGA1UECgwJS29uZyBJbmMuMB4XDTIwMDQyMzIwMjcwMFoXDTMwMDQy 4 | MTIwMjcwMFowFDESMBAGA1UECgwJS29uZyBJbmMuMIIBIjANBgkqhkiG9w0BAQEF 5 | AAOCAQ8AMIIBCgKCAQEAvVBrEH34MzwKlkBapiNyXr9huSShuojy+7i/01BSFng3 6 | 1TiejXJ3pEjykZqt7ENkZ6+BTYUdb9klK221yXiSyX71x97O0WHHuhH/m4XwGiIH 7 | YPBHdg+ExdMRflXgwtlW3of2hTWxkPkPQDPhoSQVMc5DkU7EOgrTxkv1rUWVAed4 8 | gSK4IT2AkhKwOSkewZANj2bnK5Evf71ACyJd7IQbJAIYoKBwRJAUXJMA7XAreIB+ 9 | nEr9whNYTklhB4aEa2wtOQuiQubIMJzdOryEX5nufH+tL4p1QKhRPFAqqtJ2Czgw 10 | YZY/v9IrThl19r0nL7FIvxFDNIMeOamJxDLQqsh9NwIDAQABo1MwUTAdBgNVHQ4E 11 | FgQU9t6YAdQ5mOXeqvptN5l3yYZGibEwHwYDVR0jBBgwFoAU9t6YAdQ5mOXeqvpt 12 | N5l3yYZGibEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAhi83 13 | aXsfJGqr9Zb1guWxbI8uKoG6o88ptXjV2c6dJnxXag0A/Rj+bX2bcPkN2kvQksNl 14 | MBUQlniOydZfsBUAoC0V7yyGUv9eO2RIeFnnNpRXNu+n+Kg2bvgvu8BKNNNOASZv 15 | +Vmzvo9lbfhS9MNAxYk9eTiPNUZ3zn2RfFyT6YWWJbRjk//EAlchyud3XGug9/hw 16 | c05dtzWEYT8GdzMd+Y1/2kR5r/CapSj7GEqL5T3+zDIfjbhTokV7WBrw6og2avoZ 17 | vzrF8xWucry5/2mKQbRxMyCtKYUKTcoLzF4HrNQCETm0n9qUODrHER7Wit9fQFZX 18 | 1GEA3BkX2tsbIVVaig== 19 | -----END CERTIFICATE----- 20 | -------------------------------------------------------------------------------- /t/with_worker-events/util/key.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | 
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC9UGsQffgzPAqW 3 | QFqmI3Jev2G5JKG6iPL7uL/TUFIWeDfVOJ6NcnekSPKRmq3sQ2Rnr4FNhR1v2SUr 4 | bbXJeJLJfvXH3s7RYce6Ef+bhfAaIgdg8Ed2D4TF0xF+VeDC2Vbeh/aFNbGQ+Q9A 5 | M+GhJBUxzkORTsQ6CtPGS/WtRZUB53iBIrghPYCSErA5KR7BkA2PZucrkS9/vUAL 6 | Il3shBskAhigoHBEkBRckwDtcCt4gH6cSv3CE1hOSWEHhoRrbC05C6JC5sgwnN06 7 | vIRfme58f60vinVAqFE8UCqq0nYLODBhlj+/0itOGXX2vScvsUi/EUM0gx45qYnE 8 | MtCqyH03AgMBAAECggEAA1hWa/Yt2onnDfyZHXJm5PGwwlq5WNhuorADA7LZoHgD 9 | VIspkgpBvu9jCduX0yLltUdOm5YMjRtjIr9PhP3SaikKIrv3H5AAvXLv90mIko2j 10 | X70fJiDkEbLHDlpqHEdG16vDWVs3hf5AnLvN8tD2ZujkHL8tjHEAiPJyptsh5OSw 11 | XaltCD67U940XXJ89x0zFZ/3RoRk78wX3ELz7/dY0cMnslMavON+LYTq9hQZyVmm 12 | nOhZICWerKjax4t5f9PZ/zM6IhEVrUhw2WrC31tgRo+ITCIA/nkKid8vNhkiLVdw 13 | jTyAYDLgYW7K8/zVrzmV9TOr3CaZHLQxnF/LMpIEAQKBgQDjnA/G4g2mDD7lsqU1 14 | N3it87v2VBnZPFNW6L17Qig+2BDTXg1kadFBlp8qtEJI+H5axVSmzsrlmATJVhUK 15 | iYOQwiEsQnt4tGmWZI268NAIUtv0TX0i9yscsezmvGABMcyBCF7ZwFhUfhy0pn1t 16 | kzmbYN4AjYdcisCnSusoMD92NwKBgQDU7YVNuieMIZCIuSxG61N1+ZyX3Ul5l6KU 17 | m1xw1PZvugqXnQlOLV/4Iaz86Vvlt2aDqTWO/iv4LU7ixNdhRtxFIU/b2a8DzDOw 18 | ijhzMGRJqJOdi1NfciiIWHyrjRmGbhCgm784vqV7qbQomiIsjgnDvjoZkossZMiJ 19 | 63vs7huxAQKBgQDiQjT8w6JFuk6cD+Zi7G2unmfvCtNXO7ys3Fffu3g+YJL5SrmN 20 | ZBN8W7qFvQNXfo48tYTc/Rx8941qh4QLIYAD2rcXRE9xQgbkVbj+aHykiZnVVWJb 21 | 69CTidux0vist1BPxH5lf+tOsr7eZdKxpnTRnI2Thx1URSoWI0d4f93WKQKBgBXn 22 | kW0bl3HtCgdmtU1ebCmY0ik1VJezp8AN84aQAgIga3KJbymhtVu7ayZhg1iwc1Vc 23 | FOxu7WsMji75/QY+2e4qrSJ61GxZl3+z2HbRJaAGPZlZeew5vD26jKjBTTztGbzM 24 | CPH3euKr5KLAqH9Y5VxDt4pl7vdULuUxWoBXRnYBAoGAHIFMYiCdXETtrFHKVTzc 25 | vm4P24PnsNHoDTGMXPeRYRKF2+3VEJrwp1Q3fue4Go4zFB8I6nhNVIbh4dIHxFab 26 | hyxZvGWGUgRvTvD4VYn/YHVoSf2/xNZ0r/S2LKomp+jwoWKfukbCoDjAOWvnK5iD 27 | o41Tn0yhzBdnrYguKznGR3g= 28 | -----END PRIVATE KEY----- 29 | -------------------------------------------------------------------------------- /t/with_worker-events/util/reindex: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env lua 2 | 3 | if not arg[1] then 4 | io.stderr:write("Usage: "..arg[0].." t/*.t\n") 5 | os.exit(1) 6 | end 7 | 8 | for _, name in ipairs(arg) do 9 | local i = 1 10 | local fd = io.open(name, "r") 11 | if fd then 12 | local new = name.."~" 13 | local out = io.open(new, "w") 14 | for line in fd:lines() do 15 | local test, n, desc = line:match("^(===%s*TEST%s*)(%d+)(.*)$") 16 | if test then 17 | out:write(test .. tostring(i) .. desc .. "\n") 18 | i = i + 1 19 | else 20 | out:write(line .. "\n") 21 | end 22 | end 23 | out:close() 24 | fd:close() 25 | os.execute("mv " .. new .. " " .. name) 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /valgrind.suppress: -------------------------------------------------------------------------------- 1 | # Valgrind suppression file for LuaJIT 2.0. 
2 | { 3 | Optimized string compare 4 | Memcheck:Addr4 5 | fun:lj_str_cmp 6 | } 7 | { 8 | Optimized string compare 9 | Memcheck:Addr1 10 | fun:lj_str_cmp 11 | } 12 | { 13 | Optimized string compare 14 | Memcheck:Addr4 15 | fun:lj_str_new 16 | } 17 | { 18 | Optimized string compare 19 | Memcheck:Addr1 20 | fun:lj_str_new 21 | } 22 | { 23 | Optimized string compare 24 | Memcheck:Cond 25 | fun:lj_str_new 26 | } 27 | { 28 | 29 | Memcheck:Leak 30 | fun:malloc 31 | fun:ngx_alloc 32 | fun:ngx_event_process_init 33 | } 34 | { 35 | 36 | Memcheck:Param 37 | epoll_ctl(event) 38 | fun:epoll_ctl 39 | fun:ngx_epoll_add_event 40 | } 41 | { 42 | 43 | Memcheck:Param 44 | epoll_ctl(event) 45 | fun:epoll_ctl 46 | fun:ngx_epoll_add_connection 47 | } 48 | { 49 | 50 | Memcheck:Addr4 51 | fun:ngx_init_cycle 52 | fun:ngx_master_process_cycle 53 | fun:main 54 | } 55 | { 56 | 57 | Memcheck:Cond 58 | fun:ngx_init_cycle 59 | fun:ngx_master_process_cycle 60 | fun:main 61 | } 62 | { 63 | 64 | Memcheck:Cond 65 | fun:index 66 | fun:expand_dynamic_string_token 67 | fun:_dl_map_object 68 | fun:map_doit 69 | fun:_dl_catch_error 70 | fun:do_preload 71 | fun:dl_main 72 | fun:_dl_sysdep_start 73 | fun:_dl_start 74 | } 75 | { 76 | 77 | Memcheck:Param 78 | epoll_ctl(event) 79 | fun:epoll_ctl 80 | fun:ngx_epoll_init 81 | fun:ngx_event_process_init 82 | } 83 | { 84 | 85 | Memcheck:Param 86 | epoll_ctl(event) 87 | fun:epoll_ctl 88 | fun:ngx_epoll_notify_init 89 | fun:ngx_epoll_init 90 | fun:ngx_event_process_init 91 | } 92 | --------------------------------------------------------------------------------