1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
#!/usr/bin/env python
#
# Copyright (c) 2009, devdoodles
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#
#     * Redistributions in binary form must reproduce the above
#       copyright notice, this list of conditions and the following
#       disclaimer in the documentation and/or other materials provided
#       with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import httplib
import os
import socket
import sys
import time
import urlparse

# Global socket timeout applied to every HTTP request made by this plugin.
# Configurable via the `request_timeout` environment variable (seconds);
# defaults to 10 when unset or empty.
request_timeout = os.environ.get('request_timeout') or 10
socket.setdefaulttimeout(float(request_timeout))

def load_urls():
    """Collect URL definitions from numbered environment variables.

    Reads url1_url, url2_url, ... consecutively until the first missing
    index. For each URL found, the optional companion variables
    urlN_name / urlN_label / urlN_timeout / urlN_warning / urlN_critical
    are also picked up (None when absent).

    Returns a list of dicts, one per configured URL.
    """
    entries = []
    index = 1
    while True:
        target = os.getenv("url%s_url" % index)
        if not target:
            # First gap in the numbering terminates the scan.
            break
        entry = {'url': target}
        entry.update(
            (key, os.getenv("url%s_%s" % (index, key)))
            for key in ('name', 'label', 'timeout', 'warning', 'critical'))
        entries.append(entry)
        index += 1
    return entries

def do_request(dest, host, path, scheme):
    """Issue a single GET request and return the HTTP status code.

    dest   -- network location (host[:port]) to connect to
    host   -- value sent in the Host: header (virtual-host support)
    path   -- request path component of the URL
    scheme -- 'http' or 'https'; selects the connection class

    Raises socket.timeout (and other socket/httplib errors) on failure;
    callers are expected to handle them.
    """
    if scheme == 'https':
        # Bug fix: this branch previously created a plain HTTPConnection,
        # which cannot talk TLS to an https endpoint.
        conn = httplib.HTTPSConnection(dest)
    else:
        conn = httplib.HTTPConnection(dest)
    try:
        conn.request('GET', path, headers={ 'Host': host })
        resp = conn.getresponse()
        # Drain the body so the connection shuts down cleanly; the content
        # itself is not used, only the status code matters here.
        resp.read()
        return resp.status
    finally:
        # Close even if request/getresponse raised (e.g. socket.timeout),
        # fixing a socket leak in the original.
        conn.close()

# Parse the configured URL list once; used both by the "config" run below
# and by the measurement loop at the bottom of the script.
urls = load_urls()

# Munin invokes the plugin with the single argument "config" to request
# graph metadata instead of values; print the graph definition and exit.
if len(sys.argv) > 1 and sys.argv[1] == 'config':
    # Graph-level settings, overridable through the environment.
    title = os.getenv("graph_title") or "Response time"
    category = os.getenv("graph_category")
    draw = os.getenv("draw")
    if category: print "graph_category %s" % (category,)
    print "graph_title %s" % (title,)
    print "graph_vlabel Time (seconds)"
    print "graph_args --lower-limit 0"
    # Per-URL field attributes: a shared draw style plus any of
    # label/warning/critical that were configured for that URL.
    for url in urls:
        if draw: print "%s.draw %s" % (url['name'], draw)
        for v in ('label', 'warning', 'critical'):
            if url[v]: print "%s.%s %s" % (url['name'], v, url[v])
    sys.exit(0)

for url in urls:
    o = urlparse.urlparse(url['url'])
    try:
        t1 = time.time()
        status = do_request(o.netloc, o.hostname, o.path, o.scheme)
        t2 = time.time()
        if status == 200:
            print "%s.value %.2f" % (url['name'], t2 - t1)
        else:
            print "%s.value U" % (url['name'],)
    except socket.timeout:
        print "%s.value %s" % (url['name'], request_timeout)
    except:
        print "%s.value U" % (url['name'],)