# File: //usr/lib/python2.7/site-packages/lap/check_domain.py
import yaml
import os
import time
from datetime import date, datetime, timedelta
import re
import types
import sys
from subprocess import Popen, PIPE
# Python 2 only: re-expose sys.setdefaultencoding (deleted by site.py) and
# force UTF-8 so whois output with non-ASCII bytes does not raise
# UnicodeDecodeError during implicit str/unicode coercion.
# NOTE(review): this is a process-wide hack and has no Python 3 equivalent.
reload(sys)
sys.setdefaultencoding('utf-8')
def __run__(params):
try:
import mechanize
from bs4 import BeautifulSoup
except ImportError as e:
return [2, "CRITICAL - Error: Please install python-%s" % str(e).split(' ')[3]]
try:
url = 'http://whois.locaweb.com.br'
domain = yaml.load(params.get('domain'))
thresholds = yaml.load(params.get('thresholds'))
cache_time = int(params.get('cache_time', '86400'))
cache_file = '/var/cache/locaweb-plugins/check_whois_%s' % domain
infos = { 'domain': domain }
except Exception, e:
return [2, "CRITICAL - Error: %s" % repr(e)]
regex = {
'expires:\s*(?P<value>.+)': [ '%Y%m%d' ],
'Expiration Date:\s*(?P<value>.+)': [ '%d-%b-%Y', '%Y-%m-%d' ],
'Registrar Registration Expiration Date:\s?(?P<value>.+)': [ '%Y-%m-%d %H:%M:%S' ],
'Registry Expiry Date:\s*(?P<value>.+)': [ '%Y-%m-%dT%H:%M:%SZ' ],
'Domain Expiration Date:\s*(?P<value>.+)': [ '%a %b %d %H:%M:%S %Z %Y' ],
}
try:
if os.path.isfile(cache_file) and \
(time.time() - os.stat(cache_file).st_mtime) < cache_time and \
os.stat(cache_file).st_size > 0:
data = open(cache_file).read()
else:
try:
if domain.endswith('.br'):
br = mechanize.Browser()
br.open(url)
br.select_form(nr=0)
br["domain_id"] = domain
result = br.submit().read()
data = '\n'.join([ line.strip() for line in BeautifulSoup(result).text.split('\n') if line ])
with open(cache_file, 'w') as f:
f.write(data)
else:
command = 'whois %s' % domain
whois = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, stdin=PIPE)
data = whois.stdout.read()
with open(cache_file, 'w') as f:
f.write(data)
except IOError, e:
return [2, 'CRITICAL - %s on %s' % (e, url)]
except Exception, e:
return [2, 'CRITICAL - Error: %s' % repr(e)]
for line in data.split('\n'):
if line.startswith('No match for'):
os.remove(cache_file)
return [2, "CRITICAL - Error: %(domain)s not found" % infos]
elif line.startswith('You exceeded the'):
os.remove(cache_file)
return [2, "CRITICAL - Error: You exceeded the maximum allowable"]
elif line.find('Permission denied') != -1:
os.remove(cache_file)
return [2, "CRITICAL - Error: Permission denied to query %(domain)s" % infos]
for rule, parse in regex.iteritems():
r = re.compile(rule).search(data)
if not isinstance(r, types.NoneType):
if not 'expiration_date' in infos:
infos.update({'expiration_date': r.group(1).strip()})
for p in parse:
try:
infos.update({'days': (datetime.strptime(infos['expiration_date'], p) - datetime.now()).days})
except:
pass
except Exception, e:
return [2, "CRITICAL - Error: %s" % repr(e)]
if 'days' in infos:
if int(infos['days']) > int(thresholds['warning']):
return [0, "OK - %(days)s days left to renew the domain %(domain)s" % infos]
elif int(infos['days']) <= int(thresholds['warning']) and int(infos['days']) > int(thresholds['critical']):
return [1, "WARNING - Only %(days)s days left to renew the domain %(domain)s" % infos]
elif int(infos['days']) <= int(thresholds['critical']) and int(infos['days']) > 0:
return [2, "CRITICAL - The domain %(domain)s expires within %(days)s days" % infos]
elif int(infos['days']) < 0:
return [2, "CRITICAL - Domain %(domain)s expired" % infos]
else:
return [2, 'CRITICAL - Error: problems to query the domain %(domain)s' % infos]