|
|
@@ -5,6 +5,7 @@ warnings.filterwarnings('ignore', 'libevent')
|
|
|
import gevent.monkey
|
|
|
gevent.monkey.patch_all(thread=False)
|
|
|
|
|
|
+from collections import defaultdict
|
|
|
import copy
|
|
|
import datetime
|
|
|
import errno
|
|
|
@@ -26,8 +27,9 @@ handlers = None
|
|
|
def main():
|
|
|
global handlers
|
|
|
handlers = {
|
|
|
- 'data': get_data,
|
|
|
'datum': post_datum,
|
|
|
+ 'raw': get_raw,
|
|
|
+ 'stats': get_stats,
|
|
|
}
|
|
|
server = gevent.pywsgi.WSGIServer(('0.0.0.0', 8892), application)
|
|
|
reloader.init(server)
|
|
|
@@ -77,7 +79,7 @@ def application(environ, start_response):
|
|
|
start_response('500 Internal Server Error', ERROR_HEADERS)
|
|
|
return ['ruh roh']
|
|
|
|
|
|
-def get_data(split, query, environ):
|
|
|
+def get_raw(split, query, environ):
|
|
|
try:
|
|
|
group = int(split[1])
|
|
|
server_id = int(split[3])
|
|
|
@@ -93,16 +95,79 @@ def get_data(split, query, environ):
|
|
|
date_str = c.isoformat()
|
|
|
try:
|
|
|
with open(path.join(server_dir, date_str), 'r') as f:
|
|
|
- stats = fileio.read_stats(f)
|
|
|
+ data = fileio.read_stats(f)
|
|
|
except IOError as e:
|
|
|
if e.errno == errno.ENOENT:
|
|
|
- stats = None
|
|
|
+ data = None
|
|
|
else:
|
|
|
raise
|
|
|
- rval[date_str] = stats
|
|
|
+ rval[date_str] = data
|
|
|
c += datetime.timedelta(days=1)
|
|
|
return rval
|
|
|
|
|
|
def _derive_cpu(subfields, last_val):
    """Per-minute cpu-time deltas, scaled to percent of total cpu time.

    Mutates last_val['cpu'] so deltas carry across minute slots (and day
    boundaries when called repeatedly). Slots without a usable previous
    sample stay None.
    """
    field_data = {}
    # Total cpu time spent in each minute, summed over all counters.
    sums = [0] * 1440
    # First pass: subtract each counter's previous sample to get the time
    # spent in this minute, and accumulate the per-minute totals.
    for subfield, raw_array in subfields.items():
        if subfield == 'num_cpus':
            continue  # not a time counter; used only for scaling below
        array = [None] * 1440
        for i, d in enumerate(raw_array):
            lv = last_val['cpu'][subfield]
            if d > 0 and lv > 0:
                array[i] = d - lv
                sums[i] += array[i]
            last_val['cpu'][subfield] = d
        field_data[subfield] = array
    # Second pass: express each positive delta as a percentage of the
    # minute's total, scaled by the number of cpus. (field_data never
    # contains 'num_cpus' — it was skipped above.)
    for subfield, array in field_data.items():
        for i, d in enumerate(array):
            if d > 0:
                array[i] = d * subfields['num_cpus'][i] * 100 / sums[i]
    return field_data


def _derive_mem(subfields):
    """Memory series converted to MB.

    'free' is computed as total - used; 'used' is reduced by buffers and
    cached so it reflects memory actually claimed by applications.
    """
    MB = 1024 * 1024
    field_data = {}
    for subfield in ('used', 'buffers', 'cached', 'free'):
        field_data[subfield] = [None] * 1440
    for i in range(1440):
        field_data['free'][i] = (subfields['total'][i] - subfields['used'][i]) / MB
        field_data['used'][i] = (subfields['used'][i] -
                                 subfields['buffers'][i] - subfields['cached'][i]) / MB
        field_data['buffers'][i] = subfields['buffers'][i] / MB
        field_data['cached'][i] = subfields['cached'][i] / MB
    return field_data


def _derive_net(subfields, last_val):
    """Network series: kbit/s from byte-counter deltas, plus raw counters.

    Mutates last_val['net'] so byte deltas carry across minute slots.
    Slots without a usable previous sample stay None.
    """
    field_data = {}
    for subfield in ('kbit/s_in', 'kbit/s_out', 'err_in', 'err_out',
                     'drop_in', 'drop_out'):
        field_data[subfield] = [None] * 1440
    for i in range(1440):
        lv_recv = last_val['net']['bytes_recv']
        lv_sent = last_val['net']['bytes_sent']
        if lv_recv > 0:
            field_data['kbit/s_in'][i] = (subfields['bytes_recv'][i] - lv_recv) * 8.0 / 1024
        if lv_sent > 0:
            field_data['kbit/s_out'][i] = (subfields['bytes_sent'][i] - lv_sent) * 8.0 / 1024
        last_val['net']['bytes_recv'] = subfields['bytes_recv'][i]
        last_val['net']['bytes_sent'] = subfields['bytes_sent'][i]
        # Raw error/drop counters are stored without the underscore
        # ('errin', 'errout', ... — presumably psutil naming; verify
        # against fileio's writer).
        for subfield in ('err_in', 'err_out', 'drop_in', 'drop_out'):
            field_data[subfield][i] = subfields[subfield.replace('_', '')][i]
    return field_data


def get_stats(split, query, environ):
    """Return per-day derived statistics for one server.

    Builds on get_raw(): for each day of raw samples (1440 per-minute
    slots) it derives cpu percentages, memory usage in MB, and network
    throughput/error series.

    Returns a mapping: field -> subfield -> ISO date string -> 1440-slot
    list (None marks slots with no usable sample).
    """
    raw = get_raw(split, query, environ)
    # Template with every counter preset to -1 ("no previous sample");
    # the cpu/net helpers update it in place so deltas span day
    # boundaries.
    last_val = fileio.gen_template(-1)
    stats = defaultdict(dict)
    # Process days chronologically: delta calculations against last_val
    # are order-dependent, and plain dict iteration order would make the
    # results nondeterministic.
    for date in sorted(raw):
        data = raw[date]
        if data is None:
            # get_raw maps a missing data file (ENOENT) to None for that
            # day — skip it instead of crashing on None.items().
            continue
        for field, subfields in data.items():
            if field == 'cpu':
                field_data = _derive_cpu(subfields, last_val)
            elif field == 'mem':
                field_data = _derive_mem(subfields)
            elif field == 'net':
                field_data = _derive_net(subfields, last_val)
            else:
                continue  # unknown field: ignore, matching prior behavior
            for subfield, array in field_data.items():
                stats[field].setdefault(subfield, {})
                stats[field][subfield][date] = array
    return stats
|
|
|
+
|
|
|
def post_datum(split, query, environ):
|
|
|
try:
|
|
|
group = int(split[1])
|