@@ -1,10 +1,13 @@
 #!/usr/bin/env python
 
+import copy
+import datetime
 import errno
 import json
 import os
 from os import path
 import traceback
+import urlparse
 import warnings
 
 warnings.filterwarnings('ignore', 'libevent')
@@ -29,11 +32,21 @@ def main():
 def application(environ, start_response):
     try:
         split = environ['PATH_INFO'][1:].split('/')
+        qs = environ['QUERY_STRING']
+        if qs:
+            query = urlparse.parse_qs(qs, True, True)
+            for k, v in query.items():
+                if len(v) > 1:
+                    start_response('400 Bad Request', [('Content-type', 'text/plain')])
+                    return ['duplicate query parameter: ' + k]
+                query[k] = v[0]
+        else:
+            query = {}
         if split[0] == 'v1':
             handler = handlers.get(split[2])
             if handler:
-                body = handler(split, environ)
-                start_response('200 OK', [('Content-type', 'text/plain')])
+                body = json.dumps(handler(split, query, environ))
+                start_response('200 OK', [('Content-type', 'application/json')])
                 return [body]
             else:
                 print 'no handler for', split
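
(A note on the query handling above: urlparse.parse_qs always returns a list of values per key — the two positional Trues are keep_blank_values and strict_parsing — which is why the handler flattens single-element lists and 400s anything longer. A quick interpreter sketch, with made-up dates:

    >>> import urlparse
    >>> urlparse.parse_qs('start=2013-06-01', True, True)
    {'start': ['2013-06-01']}
    >>> urlparse.parse_qs('start=2013-06-01&start=2013-06-02', True, True)
    {'start': ['2013-06-01', '2013-06-02']}

The second result would trip the len(v) > 1 branch.)
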
@@ -46,28 +59,68 @@ def application(environ, start_response):
         start_response('500 Internal Server Error', [('Content-type', 'text/plain')])
         return ['ruh roh']
 
-def get_data(split, environ):
+def get_data(split, query, environ):
     group = int(split[1])
     server_id = int(split[3])
-    data_path = path.join(DATA_DIR, str(group), str(server_id))
-    with open(data_path, 'r') as f:
-        stats = fileio.read_stats(f)
-    return json.dumps(stats)
+    start = datetime.datetime.strptime(query['start'], '%Y-%m-%d').date()
+    end = datetime.datetime.strptime(query['end'], '%Y-%m-%d').date()
+
+    server_dir = path.join(DATA_DIR, str(group), str(server_id))
+    rval = {}
+    c = start
+    while c <= end:
+        date_str = c.isoformat()
+        with open(path.join(server_dir, date_str), 'r') as f:
+            stats = fileio.read_stats(f)
+        rval[date_str] = stats
+        c += datetime.timedelta(days=1)
+    return rval
 
-def post_datum(split, environ):
+def post_datum(split, query, environ):
     group = int(split[1])
     server_id = int(split[3])
     body = json.load(environ['wsgi.input'])
 
-    group_dir = path.join(DATA_DIR, str(group))
+    server_dir = path.join(DATA_DIR, str(group), str(server_id))
     try:
-        os.makedirs(group_dir)
+        os.makedirs(server_dir)
     except OSError as e:
         if e.errno != errno.EEXIST:
             raise
 
-    with open(path.join(group_dir, str(server_id)), 'w') as f:
-        fileio.write_datum(f, body)
-    return '{"status": "ok"}'
+    # we floor to the minute, so this rounds to the nearest minute
+    now = datetime.datetime.utcnow() + datetime.timedelta(seconds=29)
+    data_path = path.join(server_dir, now.date().isoformat())
+    try:
+        with open(data_path, 'r') as f:
+            stats = fileio.read_stats(f)
+    except IOError as e:
+        if e.errno != errno.ENOENT:
+            raise
+        stats = copy.deepcopy(fileio.TEMPLATE)
+
+    index = now.hour * 60 + now.minute
+    data = {}
+    for field, subfields in stats.items():
+        field_data = {}
+        if field == 'disk':
+            disk = stats['disk']
+            for mountpoint, datum in body['disk'].items():  # iterate through body to get new mountpoints
+                disk.setdefault(mountpoint, {'total': [-1] * 1440, 'used': [-1] * 1440})
+                field_data[mountpoint] = {}
+                for subfield, array in disk[mountpoint].items():
+                    array = list(array)
+                    array[index] = datum[subfield]
+                    field_data[mountpoint][subfield] = array
+        else:
+            for subfield, array in subfields.items():
+                array = list(array)
+                array[index] = body[field][subfield]
+                field_data[subfield] = array
+        data[field] = field_data
+
+    with open(data_path, 'w') as f:
+        fileio.write_datum(f, data)
+    return {'status': 'ok'}
 
 main()
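
With this change the on-disk layout becomes one file per server per day, at DATA_DIR/<group>/<server_id>/<YYYY-MM-DD>, and get_data returns a {date: stats} mapping for every day from start through end. Each day file holds 1440 slots per series, one per minute of the UTC day, with -1 marking minutes that never received a datum. A sketch of the stored shape, assuming hypothetical 'cpu' fields (the real field set comes from fileio.TEMPLATE, which this diff doesn't show; only 'disk' with 'total'/'used' is visible above):

    # Illustrative only: 'cpu'/'user'/'system' are made-up names;
    # 'disk', 'total', 'used', and the 1440-slot arrays are from the diff.
    stats = {
        'cpu': {
            'user': [-1] * 1440,    # one slot per minute of the UTC day
            'system': [-1] * 1440,
        },
        'disk': {
            '/': {'total': [-1] * 1440, 'used': [-1] * 1440},
        },
    }
    # A datum POSTed at 14:07 UTC lands at index 14 * 60 + 7 == 847.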