#!/usr/bin/env python
import warnings
warnings.filterwarnings('ignore', 'libevent')
import gevent.monkey
gevent.monkey.patch_all(thread=False)

import copy
import datetime
import errno
import httplib
import json
import os
from os import path
import traceback
import urlparse

import gevent.pywsgi

import fileio
import reloader

DATA_DIR = path.expanduser('~/sysvitals_data')

# URL layout is /v1/<group>/<handler>/<server_id>; handlers is populated by
# main() rather than at module level.
handlers = None


def main():
    global handlers
    handlers = {
        'data': get_data,
        'datum': post_datum,
    }
    server = gevent.pywsgi.WSGIServer(('0.0.0.0', 8892), application)
    reloader.init(server)
    server.serve_forever()


class HTTPException(Exception):
    def __init__(self, code, body):
        self.code = code
        self.body = body


def application(environ, start_response):
    try:
        split = environ['PATH_INFO'][1:].split('/')
        qs = environ['QUERY_STRING']
        if qs:
            # keep blank values, use strict parsing, and flatten each
            # parameter to a single value
            query = urlparse.parse_qs(qs, True, True)
            for k, v in query.items():
                if len(v) > 1:
                    raise HTTPException(400, 'duplicate query parameter: ' + k)
                query[k] = v[0]
        else:
            query = {}
        if split[0] == 'v1' and len(split) > 2:
            handler = handlers.get(split[2])
            if handler:
                body = json.dumps(handler(split, query, environ))
                start_response('200 OK', [('Content-type', 'application/json')])
                return [body]
            else:
                print 'no handler for', split
        else:
            print 'split was', split
        raise HTTPException(404, 'unhandled path: ' + environ['PATH_INFO'])
    except HTTPException as e:
        response = '%d %s' % (e.code, httplib.responses[e.code])
        start_response(response, [('Content-type', 'text/plain')])
        return [e.body]
    except Exception:
        traceback.print_exc()
        start_response('500 Internal Server Error',
                       [('Content-type', 'text/plain')])
        return ['ruh roh']


def get_data(split, query, environ):
    try:
        group = int(split[1])
        server_id = int(split[3])
        start = datetime.datetime.strptime(query['start'], '%Y-%m-%d').date()
        end = datetime.datetime.strptime(query['end'], '%Y-%m-%d').date()
    except (IndexError, KeyError, ValueError):
        raise HTTPException(400, 'expected /v1/<group>/data/<server_id>'
                                 '?start=YYYY-MM-DD&end=YYYY-MM-DD')
    server_dir = path.join(DATA_DIR, str(group), str(server_id))
    rval = {}
    c = start
    while c <= end:
        date_str = c.isoformat()
        try:
            with open(path.join(server_dir, date_str), 'r') as f:
                stats = fileio.read_stats(f)
        except IOError as e:
            if e.errno == errno.ENOENT:
                # no data recorded for this day
                stats = None
            else:
                raise
        rval[date_str] = stats
        c += datetime.timedelta(days=1)
    return rval


def post_datum(split, query, environ):
    try:
        group = int(split[1])
        server_id = int(split[3])
    except (IndexError, ValueError):
        raise HTTPException(400, 'expected /v1/<group>/datum/<server_id>')
    try:
        body = json.load(environ['wsgi.input'])
    except ValueError:
        raise HTTPException(400, 'post body was not valid JSON')
    if not isinstance(body, dict):
        raise HTTPException(400, 'post body was not a JSON dictionary')
    # compare as sets: dict key order is arbitrary, only membership matters
    if set(body.keys()) != set(fileio.TEMPLATE.keys()):
        diff = set(body.keys()).symmetric_difference(set(fileio.TEMPLATE.keys()))
        raise HTTPException(400, 'post body had missing or extra keys: ' +
                            ','.join(diff))

    server_dir = path.join(DATA_DIR, str(group), str(server_id))
    try:
        os.makedirs(server_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    # we floor to the minute below, so adding 29 seconds here rounds the
    # timestamp to the nearest minute
    now = datetime.datetime.utcnow() + datetime.timedelta(seconds=29)
    data_path = path.join(server_dir, now.date().isoformat())
    try:
        with open(data_path, 'r') as f:
            stats = fileio.read_stats(f)
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        # first datum of the day: start from a fresh copy of the template
        stats = copy.deepcopy(fileio.TEMPLATE)

    # one slot per minute of the day (1440 total)
    index = now.hour * 60 + now.minute
    data = {}
    for field, subfields in stats.items():
        field_data = {}
        if field == 'disk':
            # disk is nested one level deeper: mountpoint -> subfield ->
            # array. Iterate over the body so newly-appearing mountpoints are
            # picked up; mountpoints absent from this post are dropped from
            # the day's file.
            disk = stats['disk']
            for mountpoint, datum in body['disk'].items():
                disk.setdefault(mountpoint,
                                {'total': [-1] * 1440, 'used': [-1] * 1440})
                field_data[mountpoint] = {}
                for subfield, array in disk[mountpoint].items():
                    array = list(array)
                    array[index] = datum[subfield]
                    field_data[mountpoint][subfield] = array
        else:
            for subfield, array in subfields.items():
                array = list(array)
                array[index] = body[field][subfield]
                field_data[subfield] = array
        data[field] = field_data

    with open(data_path, 'w') as f:
        fileio.write_datum(f, data)
    return {'status': 'ok'}


main()
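
# A minimal sketch of exercising the API from the shell, assuming the server
# is reachable at localhost:8892 (the bind address above). The group id (1),
# server id (2), and dates below are hypothetical, and the POST body must
# carry exactly the top-level keys of fileio.TEMPLATE (elided here as '{...}'):
#
#   curl --data '{...}' 'http://localhost:8892/v1/1/datum/2'
#   curl 'http://localhost:8892/v1/1/data/2?start=2014-01-01&end=2014-01-07'
#
# The GET returns a JSON object keyed by ISO date, with null for days that
# have no recorded data.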