File: /usr/bin/audit-service
#!/usr/bin/python
import hashlib
import yaml
import base64
import os
import os.path
import psycopg2
import time
import urllib
import json
import ConfigParser
from datetime import datetime
from glob import glob as ls
from lwdbadmin.util import getsrvips, getservices
from lwdbadmin.mysql.lwmysrvadm import getsrvips, getservices
from mybackup import MyBackup
def cftagsandpost(information):
    """Collect cfengine tag directories into *information* and POST the
    result (base64-encoded JSON) to the leela service for this host.

    Mutates and returns *information* (adds the 'cfengine' key).
    """
    # bugfix: hardware audit do not include cfengine3
    import cfengine3
    config = ConfigParser.ConfigParser()
    config.read("/etc/locaweb/audit.conf")
    hostname = os.uname()[1]
    tags_path = '/etc/default/locaweb'
    # Each tag category is a directory; every file inside is one tag value.
    cftags = {'dc': [], 'description': [], 'team': []}
    for tag in cftags:
        path = '%s/%s' % (tags_path, tag)
        if os.path.isdir(path):
            for info in ls('%s/*' % path):
                cftags[tag].append(info.split('/')[-1])
    information.update({'cfengine': cftags})
    # saruman hack!
    # NOTE(review): if both shared_* flag files exist, 'saruman' is appended
    # twice; original behavior kept -- confirm duplicates are intended.
    if os.path.exists("/etc/default/locaweb/description/shared_postgresql"):
        information['cfengine']['description'].append('saruman')
    if os.path.exists("/etc/default/locaweb/description/shared_mysql"):
        information['cfengine']['description'].append('saruman')
    url = "%s/%s" % (config.get('leela', 'url'), hostname)
    enable = config.getboolean('leela', 'cfengine')
    data = {'information': base64.b64encode(json.dumps(information))}
    if enable:
        cf3 = cfengine3.protocol()
        cf3.classes('leela_update', 'define')
        n = urllib.urlopen(url, urllib.urlencode(data))
        try:
            status = n.read()
        finally:
            # fix: the response object was never closed (file-descriptor leak
            # on a long-running loop over many services)
            n.close()
        if status != 'OK':
            cf3.scalar('status', status)
        else:
            cf3.scalar('status', "Updated")
    return information
def databasesize(information):
    """Attach per-database size stats (KiB) to *information*.

    Postgres sizes are stored under 'postgres' and MySQL sizes under
    'mysql', each only when the matching cfengine flag file exists.
    Mutates and returns *information*.
    """
    if os.path.exists("/etc/default/locaweb/description/postgresql"):
        # Connection parameters come from libpq environment variables:
        # http://www.postgresql.org/docs/current/static/libpq-envars.html
        conn = psycopg2.connect('')
        try:
            cursor = conn.cursor()
            try:
                cursor.execute("select datname, pg_database_size(datname) from pg_database")
                # pg_database_size() returns bytes; report KiB
                information['postgres'] = [
                    {"name": name, "size": size / 1024} for name, size in cursor
                ]
            finally:
                cursor.close()
        finally:
            # fix: connection and cursor were never closed (leaks a backend
            # connection per audited service)
            conn.close()
    if os.path.exists("/etc/default/locaweb/description/mysql"):
        mybackup = MyBackup()
        dbs = []
        for db in mybackup.getdatabsesstats():
            dbs.append({
                # database names may carry latin-1 bytes; normalize to utf-8
                "name": db['db'].decode('iso-8859-1').encode('utf8'),
                # datadir_size is presumably bytes -- reported as KiB
                "size": db['datadir_size'] / 1024,
            })
        information['mysql'] = dbs
    return information
def getserviceyamls(information, yamldir="/etc/locaweb/monitoring"):
    """Record name, md5 and parsed content of every *.yaml under *yamldir*
    into information['monitoring_data'].

    A file that cannot be read or parsed is recorded as an
    ('Error with <file>', <message>) tuple instead of aborting the scan.
    Mutates and returns *information*.
    """
    to_check = []
    for path in ls(os.path.join(yamldir, "*.yaml")):
        # fix: the try used to cover only the append() call, so read/parse
        # errors escaped instead of being captured per-file as the except
        # branch clearly intends; the file handle was also never closed.
        try:
            with open(path) as fh:
                raw = fh.read()
            # NOTE(review): yaml.load() can execute arbitrary tags; these are
            # local config files, but prefer yaml.safe_load if the content
            # format allows it.
            to_check.append({
                'file_name': path,
                'md5': hashlib.md5(raw).hexdigest(),
                'content': yaml.load(raw),
            })
        except Exception as e:
            to_check.append(('Error with %s' % path, e.__str__()))
    information.update({'monitoring_data': to_check})
    return information
# Main audit loop: for every local service, fake the hostname so leela
# records the data under the service name, then run the same collection
# steps a per-host audit would.
for service, datadir in getservices():
    # monkey patch os to lie the hostname to leela
    # (fakeret[2:] copies the trailing uname fields before os.uname is
    # replaced, so repeated patching across iterations stays stable)
    fakeret = ["Linux", service] + list(os.uname()[2:])
    os.uname = lambda: fakeret
    # simulate audit logic
    acc = {"timestamp": time.mktime(datetime.now().timetuple())}
    # NOTE(review): getsrvips/getservices are imported twice at the top of
    # the file (lwdbadmin.util, then lwdbadmin.mysql.lwmysrvadm); the second
    # import shadows the first for BOTH branches below, yet the postgres
    # branch unpacks two values and the mysql branch expects one -- confirm
    # each branch is calling the helper it actually needs.
    if os.path.exists("/etc/default/locaweb/description/postgresql"):
        ip1, ip2 = getsrvips(datadir)
        ips = {}
        ips["eth0"] = ip1
        ips["eth3"] = ip2
        acc["ips"] = ips
        ret = databasesize(acc)
        ret = getserviceyamls(acc, "/etc/locaweb/monitoring-{}".format(service))
        ret = cftagsandpost(acc)
    if os.path.exists("/etc/default/locaweb/description/mysql"):
        ip1 = getsrvips(datadir)
        ips = {}
        ips["eth0"] = ip1
        acc["ips"] = ips
        ret = databasesize(acc)
        ret = getserviceyamls(acc, "/etc/locaweb/monitoring-{}".format(service))
        ret = cftagsandpost(acc)