#!/usr/bin/env python3
# NOTE: This is a Python HTTPS check implemented with the requests library.
# It is called check_http_wget because it replaced an earlier wget-based check.
from argparse import ArgumentParser
from sys import exit
import requests
# Nagios-style plugin exit codes.
OK = 0
CRITICAL = 2

# Command-line interface for the HTTP(S) check.
parser = ArgumentParser(
    description='Check an HTTP(S) URL, optionally verifying that a given '
                'string appears in the response body.',
)
parser.add_argument(
    '--url',
    required=True,
    help='URL to request',
)
parser.add_argument(
    '--check-string',
    help='fail unless this string appears in the response body',
)
parser.add_argument(
    '--no-follow-redirects',
    action='store_true',
    help='do not follow HTTP redirects',
)
parser.add_argument(
    '--timeout',
    default=15,
    type=int,
    help='request timeout in seconds (default: %(default)s)',
)
parser.add_argument(
    '--useragent',
    help='custom User-Agent header value',
)
parser.add_argument(
    # '--proxy-url' is the conventional dashed spelling, consistent with the
    # other flags; the original '--proxy_url' is kept as an alias for
    # backward compatibility. Both parse into args.proxy_url.
    '--proxy-url',
    '--proxy_url',
    help='proxy URL used for both http and https traffic',
)

parser.add_argument('--no-verify-ssl', dest='verify_ssl', action='store_false',
                    help='skip TLS certificate verification')
parser.set_defaults(verify_ssl=True)
if __name__ == '__main__':
    parsed_args = parser.parse_args()

    # Keyword arguments passed straight through to requests.get().
    requests_kwargs = {
        'timeout': parsed_args.timeout,
    }

    if parsed_args.useragent:
        requests_kwargs['headers'] = {
            'User-Agent': parsed_args.useragent,
        }
    if parsed_args.proxy_url:
        # The same proxy is used for both plain and TLS traffic.
        requests_kwargs['proxies'] = {
            'http': parsed_args.proxy_url,
            'https': parsed_args.proxy_url,
        }

    if not parsed_args.verify_ssl:
        requests_kwargs['verify'] = False
        # Silence the InsecureRequestWarning that requests' vendored urllib3
        # emits for every unverified HTTPS request.
        requests.packages.urllib3.disable_warnings()

    if parsed_args.no_follow_redirects:
        requests_kwargs['allow_redirects'] = False

    try:
        response = requests.get(parsed_args.url, **requests_kwargs)
    except requests.exceptions.RequestException as e:
        # Narrowed from bare Exception: any transport-level failure (DNS,
        # connection refused, timeout, too many redirects, ...) derives from
        # RequestException and means the check is CRITICAL.
        print(e)
        exit(CRITICAL)

    if parsed_args.check_string:
        # When --check-string is given it takes precedence over the HTTP
        # status: the check passes iff the string appears in the body.
        if parsed_args.check_string not in response.text:
            print("String '{}' not found on URL '{}'".format(
                parsed_args.check_string, parsed_args.url))
            exit(CRITICAL)
    else:
        try:
            # raise_for_status() raises HTTPError for 4xx/5xx responses.
            response.raise_for_status()
        except requests.exceptions.HTTPError as e:
            print("Error: {}".format(repr(e)))
            exit(CRITICAL)

    print('OK')
    exit(OK)