├── .gitignore
├── .travis.yml
├── example_icingaservice.conf
├── icingaexchange.yml
├── composer.json
├── CHANGELOG.md
├── checkcommand_httpexpect.conf
├── LICENSE
├── README.md
└── check_http_expect
/.gitignore:
--------------------------------------------------------------------------------
1 | /.idea/
2 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: sh
2 |
3 | before_script:
4 | - sudo add-apt-repository "deb http://archive.ubuntu.com/ubuntu/ trusty-backports restricted main universe"
5 | - sudo apt-get update -qq
6 | - sudo apt-get install -qq shellcheck
7 |
8 | script:
9 | - shellcheck --shell=bash check_http_expect
10 |
11 |
--------------------------------------------------------------------------------
/example_icingaservice.conf:
--------------------------------------------------------------------------------
1 | object Service "check_title" {
2 | import "generic-service"
3 |
4 | host_name = "duckduckgo.com"
5 | check_command = "http_expect"
6 | check_interval = 5m
7 | retry_interval = 2m
8 |
9 | display_name = "example check with multiple search-strings"
10 |
11 | vars.http_expect_url = "https://duckduckgo.com"
12 | vars.http_expect_find = [ "DuckDuckGo — Privacy, simplified.", "Duck it" ]
13 | vars.http_expect_useragent = "curl/7.37.0"
14 | }
15 |
--------------------------------------------------------------------------------
/icingaexchange.yml:
--------------------------------------------------------------------------------
1 | name: check_http_expect
2 | description: "file:///README.md"
3 | url: "https://github.com/cytopia/check_http_expect"
4 | tags: Web, Http
5 | vendor: cytopia
6 | target: Operating System,Website
7 | type: Plugin
8 | license: MIT
9 | releases:
10 | -
11 | name: 0.4
12 | description: "0.4 Release"
13 | files:
14 | -
15 | name: check_http_expect
16 | url: "file:///check_http_expect"
17 | description: "Second release"
18 | checksum: 0d681a27cd116d46749da3a92cd66a82
19 |
20 |
--------------------------------------------------------------------------------
/composer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cytopia/check_http_expect",
3 | "description": "Nagios plugin to check for a string/regex on a webpage. Also works behind .htaccess and POST login.",
4 | "type": "library",
5 | "keywords": ["nagios", "nagios-plugin", "icinga", "http", "http POST", "http htaccess"],
6 | "homepage": "https://github.com/cytopia/check_http_expect",
7 | "license": "MIT",
8 | "authors": [
9 | {
10 | "name" : "Patrick Plocke",
11 | "homepage": "https://github.com/cytopia",
12 | "role": "Developer"
13 | }
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | Version 0.5
2 | -----------
3 | - [Enh] Support specifying multiple search-strings
4 | - [Enh] Support changing the user-agent
5 | - [Enh] Added simple config/example for an icinga CheckCommand and Service
6 | - [Cha] Use bash instead of sh
7 |
8 | Version 0.3 (unreleased)
9 | -----------
10 |
11 |
12 |
13 | Version 0.2
14 | -----------
15 |
16 | - [Fix] Fixed useragent escaping for normal request
17 | - [Enh] Code cleaning
18 |
19 |
20 | Version 0.1
21 | -----------
22 |
23 | - [Enh] Be able to find a string via `grep -E`-style regex
24 | - [Enh] Be able to log into .htaccess
25 | - [Enh] Be able to log into website via POST
26 |
27 |
--------------------------------------------------------------------------------
/checkcommand_httpexpect.conf:
--------------------------------------------------------------------------------
1 |
2 | object CheckCommand "http_expect" {
3 | command = [ PluginContribDir + "/check_http_expect" ]
4 |
5 | arguments = {
6 | "--url" = {
7 | value = "$http_expect_url$" // The service parameter would then be defined as 'vars.http_expect_url = "narf.zort"'
8 | description = "URL of the page to be checked"
9 | required = true
10 | }
11 | "--find" = {
12 | value = "$http_expect_find$"
13 | description = "Text or RegEx to search for"
14 | required = true
15 | }
16 | "--ua" = {
17 | value = "$http_expect_useragent$"
18 | description = "custom user-agent"
19 | required = false
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2015 cytopia
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # check_http_expect
2 |
3 | Nagios plugin that will check a website (behind .htaccess and/or behind POST login) for an expected string based on a simple string or regex expression.
4 |
5 | [](https://travis-ci.org/cytopia/check_http_expect)
6 | [](https://packagist.org/packages/cytopia/check_http_expect) [](https://packagist.org/packages/cytopia/check_http_expect) [](https://packagist.org/packages/cytopia/check_http_expect) [](http://opensource.org/licenses/MIT)
7 | [](https://en.wikipedia.org/?title=POSIX)
8 | [](https://en.wikipedia.org/?title=Bourne_shell)
9 |
10 |
11 | ---
12 |
13 | | [](https://github.com/cytopia/awesome-nagios-plugins) | Find more plugins at [Awesome Nagios](https://github.com/cytopia/awesome-nagios-plugins) |
14 | |---|---|
15 | | [](https://exchange.icinga.com/cytopia) | **Find more plugins at [Icinga Exchange](https://exchange.icinga.com/cytopia)** |
16 | | [](https://exchange.nagios.org/directory/Owner/cytopia/1) | **Find more plugins at [Nagios Exchange](https://exchange.nagios.org/directory/Owner/cytopia/1)** |
17 |
18 | ---
19 |
20 | ## 1. Usage
21 |
22 | ```shell
23 | Usage: check_http_expect --url <url> --find <string> [--find <string>] [--huser <user>] [--hpass <pass>] [--lurl <url>] [--cookie <cookie> [--cookie <cookie>]] [--ldata <data> [--ldata <data>]] [--ua <useragent>]
24 |
25 | --url Target URL
26 | --find Find string in source of Target URL ('grep -E'-style regex allowed / can be specified multiple times)
27 | --huser (Optional) htaccess username
28 | --hpass (Optional) htaccess password
29 | --lurl (Optional) Url for POST login
30 | --ldata (Optional) POST data (can be specified multiple times)
31 | --cookie (Optional) set cookies for request
32 | --ua (Optional) use this user-agent instead of the default one
33 | ```
34 |
35 | ## 2. Generic Examples
36 |
37 | ### 2.1 Search simple string
38 |
39 | Check if a website contains the word `google`
40 | ```shell
41 | $ check_http_expect --url https://google.com --find google
42 | [OK] 1 match found for: "google".
43 | Http version: HTTP/1.1
44 | Http code: 302
45 | Http info: Found
46 | Server: GFE/2.0
47 | Url: https://google.com
48 | Search: google
49 | Num matches: 1
50 | Matches:
51 | ----------------------------------------
52 | here
53 | ```
54 |
55 | ### 2.2 Search via regex
56 |
57 | Check if a website contains the following regex `[0-9]+`
58 | ```shell
59 | $ check_http_expect --url https://google.com --find '[0-9]+'
60 | [OK] 4 matches found for: "[0-9]+".
61 | Http version: HTTP/1.1
62 | Http code: 302
63 | Http info: Found
64 | Server: GFE/2.0
65 | Url: https://google.com
66 | Search: [0-9]+
67 | Num matches: 4
68 | Matches:
69 | ----------------------------------------
70 | 302 Moved 302 Moved
here.
71 | ```
72 |
73 | ### 2.3 .htaccess protection
74 |
75 | Check behind a .htaccess protected website for the string `Your site is secured`
76 | ```
77 | $ check_http_expect --url "http://www.example.com" --find 'Your site is secured' --huser john --hpass "Password"
78 | [ERROR] No matches found for: "Your site is secured".
79 | Http version: HTTP/1.1
80 | Http code: 302
81 | Http info: Found
82 | Server: Apache/2.4.16 (Amazon) PHP/5.5.30
83 | Url: http://www.example.com
84 | Search: Your site is secured
85 | Num matches: 0
86 | Matches:
87 | ----------------------------------------
88 | ```
89 |
90 | ### 2.4 POST Login
91 |
92 | Log in at `http://www.example.com/login.php` with POST data `usernameFieldName=John`, `passwordFieldName=pass`, `submit=1`, go to `http://www.example.com` and check for the regex `[0-9]+`
93 | ```
94 | $ check_http_expect --url "http://www.example.com" --find '[0-9]+' --lurl "http://www.example.com/login.php" --ldata "usernameFieldName=John" --ldata "passwordFieldName=pass" --ldata "submit=1"
95 | [ERROR] No matches found for: "[0-9]+".
96 | Http version: HTTP/1.1
97 | Http code: 302
98 | Http info: Found
99 | Server: Apache/2.4.16 (Amazon) PHP/5.5.30
100 | Url: http://www.example.com
101 | Search: Your site is secured
102 | Num matches: 0
103 | Matches:
104 | ```
105 |
106 | **Note:** htaccess and POST login can also be combined.
107 |
108 | ### 2.5 Set Cookies
109 | Send a request with a custom cookie
110 | ```shell
111 | $ check_http_expect --url https://google.com --find google --cookie "username=test"
112 | [OK] 1 match found for: "google".
113 | Http version: HTTP/1.1
114 | Http code: 302
115 | Http info: Found
116 | Server: GFE/2.0
117 | Url: https://google.com
118 | Search: google
119 | Num matches: 1
120 | Matches:
121 | ----------------------------------------
122 | here
123 | ```
124 |
125 | ### 2.6 Multiple Search-Strings and different user-agent
126 | When searching for multiple strings on the same page, it's not necessary to do multiple requests.
127 | Just specify as many `find`-parameters as you need.
128 | You may want to change the user-agent with the parameter `ua` as well, in order to avoid undesired redirects.
129 | ```shell
130 | $ check_http_expect --url "https://duckduckgo.com" --find "DuckDuckGo — Privacy, simplified." --find "Duck it\!" --find "NO-MATCH" --ua "curl/7.37.0"
131 | [WARN] Not all matches found for: "DuckDuckGo — Privacy, simplified.
132 | Duck it\!
133 | NO-MATCH" | 'Results'=2 but expected: 3
134 | Http version: HTTP/2
135 | Http code: 200
136 | Http info:
137 | Server:
138 | Url: https://duckduckgo.com
139 | Search: DuckDuckGo — Privacy, simplified.
140 | Duck it\!
141 | NO-MATCH
142 |
143 | Num matches: 2
144 | Matches:
145 | ----------------------------------------
146 | DuckDuckGo — Privacy, simplified. Duck it!
147 | ```
148 | If only some of the specified search-strings are matched, then a warning is returned.
149 |
150 | ## 3. Specific Examples
151 |
152 | ### 3.1 Log into wordpress.com
153 |
154 | Find String in Wordpress.com dashboard
155 |
156 | ```shell
157 | $ check_http_expect --url "https://dashboard.wordpress.com/wp-admin/" --find "Recently Published" --lurl "https://wordpress.com/wp-login.php" --ldata "log=USER@EMAIL" --ldata "pwd=PASSWORD" --ldata "rememberme=forever" --ldata "testcookie=1"
158 | [OK] 1 match found for: "Recently Published".
159 | Http version: HTTP/1.1
160 | Http code: 200
161 | Http info: OK
162 | Server: nginx
163 | Url: https://dashboard.wordpress.com/wp-admin/
164 | Search: Recently Published
165 | Num matches: 1
166 | Matches:
167 | ----------------------------------------
168 |
169 | ```
170 |
171 |
172 | ## 4. Awesome
173 |
174 | Added by the following [](https://github.com/sindresorhus/awesome) lists:
175 |
176 | * [awesome-nagios-plugins](https://github.com/cytopia/awesome-nagios-plugins)
177 |
--------------------------------------------------------------------------------
/check_http_expect:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ################################################################################
4 | #
5 | # V A R I A B L E S
6 | #
7 | ################################################################################
8 |
# Some creds
INFO_NAME="check_http_expect"
# NOTE(review): the author/co-author email addresses appear to have been
# stripped (trailing spaces suggest a lost "<email>" part) — restore if known.
INFO_AUTHOR="Patrick Plocke "
INFO_GPGKEY="0x28BF179F"
INFO_DATE="2021-07-16"
INFO_LICENSE="MIT"
INFO_VERSION="0.5"
# ADD Additional authors here
INFO_CO_AUTHOR1="Mathias Scherer "
INFO_CO_AUTHOR2="Sylvia van Os "
INFO_CO_AUTHOR3="Martin Drößler "


# Get the path
# Make sure common bin/sbin locations are searched even under the
# restrictive PATH used by cron/NRPE environments.
export PATH="$PATH:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin"

# Nagios error codes
EXIT_OK=0
EXIT_WARN=1
EXIT_ERR=2
EXIT_UNKNOWN=3

# defaults
# Browser-like default user-agent; can be overridden with --ua.
useragent="Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6"
# Collected --find patterns ('grep -E' style); filled by the argument parser.
searchStrings=()
34 |
35 | ################################################################################
36 | #
37 | # F U N C T I O N S
38 | #
39 | ################################################################################
40 |
41 | ############################################################
42 | # Program Functions
43 | ############################################################
44 |
45 | # Check program requirements
46 | # @output string Parsed, failed requirements
47 | # @return integer 0|3
# Verify that every external tool this plugin depends on is available.
# @output string  One status line per requirement.
# @return integer EXIT_OK (0) when all tools exist, EXIT_UNKNOWN (3) otherwise.
check_requirements() {
	if command -v curl > /dev/null 2>&1; then
		printf "[OK] 'curl' is installed.\n"
		return $EXIT_OK
	fi
	printf "[Failed] 'curl' must be installed.\n"
	return $EXIT_UNKNOWN
}
57 |
58 | # Give some creds
59 | # @output string The creds.
60 | # @return integer 0
# Give some creds
# Print program name, version, author, co-authors and license.
# @output string The creds.
# @return integer 0
print_version() {
	printf "Name: %s\n" "${INFO_NAME}"
	printf "Version: %s (%s)\n" "${INFO_VERSION}" "${INFO_DATE}"
	printf "Author: %s (%s)\n" "${INFO_AUTHOR}" "${INFO_GPGKEY}"
	printf "Co-Authors: %s\n" "${INFO_CO_AUTHOR1}"
	printf " %s\n" "${INFO_CO_AUTHOR2}"
	printf " %s\n" "${INFO_CO_AUTHOR3}"
	printf "License: %s\n" "${INFO_LICENSE}"
	return 0
}
71 |
72 |
73 | # Usage
74 | # @output string The usage screen.
75 | # @return integer 0
# Usage
# Print the one-line usage synopsis for all supported options.
# Fix: the synopsis previously omitted the supported --ua option
# (parsed in the argument loop) and had lost its value placeholders.
# @output string The usage screen.
# @return integer 0
print_usage() {
	printf "Usage: check_http_expect --url <url> --find <string> [--find <string>] [--insecure] [--huser <user>] [--hpass <pass>] [--lurl <url>] [--cookie <cookie> [--cookie <cookie>]] [--ldata <data> [--ldata <data>]] [--ua <useragent>]\n"
	printf "OR %s --check\n" "${INFO_NAME}"
	printf "OR %s --help\n" "${INFO_NAME}"
	printf "OR %s --version\n\n" "${INFO_NAME}"
	return 0
}
83 |
84 |
85 | # Help
86 | # @output string The help screen.
87 | # @return integer 0
# Help
# Print the full help screen: usage synopsis, description and option list.
# Fixes: documents the supported --ua option (previously missing), notes
# that --find may be given multiple times, and corrects the ".htacess" typo.
# @output string The help screen.
# @return integer 0
print_help() {

	# Show usage first
	print_usage

	# Show description
	printf "Check a website (behind .htaccess and/or behind POST login) for\n"
	printf "an expected string or regex expression.\n\n"

	# Show options
	printf " --url Target URL\n"
	printf " --find Find string in source of Target URL ('grep -E'-style regex allowed / can be specified multiple times)\n"
	printf " --insecure [optional] allow insecure SSL connections\n"
	printf " --huser [optional] htaccess username\n"
	printf " --hpass [optional] htaccess password\n"
	printf " --lurl [optional] Url for POST login\n"
	printf " --ldata [optional] POST data (can be specified multiple times)\n"
	printf " --cookie [optional] set cookies for request\n"
	printf " --ua [optional] use this user-agent instead of the default one\n\n"

	printf " --check Check for program requirements.\n"
	printf " --help Show this help\n"
	printf " --version Show version information.\n"
	return 0
}
112 |
113 |
114 |
115 | ################################################################################
116 | #
117 | # M A I N E N T R Y P O I N T
118 | #
119 | ################################################################################
120 |
121 | ############################################################
122 | # Check for --check, --help or --version arguments
123 | ############################################################
############################################################
# Check for --check, --help or --version arguments
############################################################
# Dispatch the three informational flags before regular parsing;
# each prints its output and exits immediately.
case "${1}" in
	--check)
		check_requirements || exit $EXIT_UNKNOWN
		exit $EXIT_OK
		;;
	--help)
		print_help
		exit $EXIT_OK
		;;
	--version)
		print_version
		exit $EXIT_OK
		;;
esac
138 |
139 | ############################################################
140 | # Check requirements
141 | ############################################################
142 |
# Hard requirement: curl performs the actual HTTP requests.
# Bail out with Nagios UNKNOWN (3) when it is not installed.
if ! command -v curl > /dev/null 2>&1; then
	printf "[UNKNOWN] 'curl' is required\n"
	exit $EXIT_UNKNOWN
fi
147 |
148 |
149 | ############################################################
150 | # Retrieve arguments
151 | ############################################################
152 |
# Parse all command-line arguments.
# Each option consumes its value via an extra `shift` (except the
# value-less --insecure flag). Repeatable options (--find, --ldata,
# --cookie) accumulate.
# Bug fix: the invalid-argument branch previously printed "${1}", which
# after the unconditional `shift` is the NEXT argument (or empty at the
# end of the list) — it now reports the offending option "${key}".
while [ $# -gt 0 ]; do
	key="$1"
	shift
	case "$key" in

		# Final target url
		--url)
			url="$1"
			shift
			;;

		# Post parameter
		--lurl)
			lurl="$1"
			shift
			;;
		--ldata)
			# Append multiple data fields
			if [ -z "$ldata" ]; then
				ldata="--data \"$1\""
			else
				ldata="${ldata} --data \"$1\""
			fi
			shift
			;;

		# Htaccess parameters
		--huser)
			huser="$1"
			shift
			;;
		--hpass)
			hpass="$1"
			shift
			;;

		# String to look for on final url
		--find)
			searchStrings+=("$1")
			shift
			;;
		# Cookies
		--cookie)
			if [ -z "$cookies" ]; then
				cookies="--cookie \"$1\""
			else
				cookies="${cookies} --cookie \"$1\""
			fi
			shift
			;;
		# Insecure (flag only, no value to shift)
		--insecure)
			insecure="1"
			;;
		--ua)
			useragent="$1"
			shift
			;;
		*)
			echo "Invalid argument: '${key}'"
			print_usage
			exit 1
			;;
	esac
done
218 |
219 |
220 | ############################################################
221 | # Validate arguments
222 | ############################################################
223 |
# --url is mandatory: without a target there is nothing to request.
if [ -z "$url" ]; then
	echo "Error, You must specify an Url."
	print_usage
	exit 1
fi

# At least one --find search string is required to have something to match.
if [ "${#searchStrings[@]}" -eq 0 ]; then
	echo "Error, You must specify what you are looking for."
	print_usage
	exit 1
fi
235 |
236 |
237 | ############################################################
238 | # Go Go Go!!!!
239 | ############################################################
240 |
241 | # Add htacess parameters
242 | if [ ! -z "$huser" ] || [ ! -z "$hpass" ]; then
243 | curl_args="--user-agent \"${useragent}\" --user \"${huser}:${hpass}\""
244 | else
245 | curl_args="--user-agent \"${useragent}\""
246 | fi
247 |
248 | if [ ! -z "$insecure" ]; then
249 | curl_args="-k ${curl_args}"
250 | fi
251 |
252 | # Add cookies
253 | if [ ! -z "$cookies" ]; then
254 | curl_args="${curl_args} ${cookies}"
255 | fi
256 |
257 | # Login required?
258 | if [ ! -z "${lurl}" ]; then
259 | login="curl -s -i ${curl_args}"
260 | login="${login} --cookie cookie.jar"
261 | login="${login} --cookie-jar cookie.jar"
262 | login="${login} --location"
263 | login="${login} ${ldata}"
264 | login="${login} ${lurl}"
265 |
266 | crawl="curl -s -i ${curl_args}"
267 | crawl="${crawl} --cookie cookie.jar"
268 | crawl="${crawl} --cookie-jar cookie.jar"
269 | crawl="${crawl} --location"
270 | crawl="${crawl} ${url}"
271 |
272 | # No fucking idea why the login only works, when it is called twice??
273 | eval "${login}" > /dev/null
274 | eval "${login}" > /dev/null
275 |
276 |
277 | output="$(eval "${crawl}")"
278 | rm -f cookie.jar
279 | else
280 | # Connect and curl
281 | crawl="curl -s -i ${curl_args} ${url}"
282 | output="$(eval "${crawl}")"
283 | fi
284 |
285 | # Split header and content
286 | head="$(echo "${output}" | awk '{if($0=="\r")exit;print}')"
287 | data="$(echo "${output}" | awk '{if(body)print;if($0=="\r")body=1}')"
288 |
289 | http_status="$(echo "${head}" | head -n1)"
290 | http_version="$(echo "${http_status}" | awk '{print $1}')"
291 | http_code="$(echo "${http_status}" | awk '{print $2}')"
292 | http_info="$(echo "${http_status}" | awk '{for (i=3; i