Explorar o código

update stats files for datums, query by date

raylu hai 11 anos
pai
achega
5ec6d9a42d
Modificáronse 2 ficheiros con 84 adicións e 24 borrados
  1. 18 11
      api/fileio.py
  2. 66 13
      api/server.py

+ 18 - 11
api/fileio.py

@@ -14,6 +14,17 @@ fields = [
 	('disk', 'q', ['total', 'used']),
 ]
 
+def _gen_template():
+	template = {}
+	for stat_group, _, subfields in fields:
+		field_data = {}
+		if stat_group != 'disk':
+			for field in subfields:
+				field_data[field] = [-1] * 1440
+		template[stat_group] = field_data
+	return template
+TEMPLATE = _gen_template()
+
 def read_stats(f):
 	stats = {}
 	for stat_group, format_code, subfields in fields:
@@ -50,17 +61,13 @@ def dict_insert(d, split, value):
 def write_datum(f, data):
 	for stat_group, format_code, subfields in fields:
 		fmt = '1440' + format_code
-		if stat_group == 'disk':
+		if stat_group != 'disk':
+			for field in subfields:
+				array = data[stat_group][field]
+				f.write(struct.pack(fmt, *array))
+		else:
 			for mountpoint, disk_data in data['disk'].items():
 				mountpoint = mountpoint.encode('utf-8')
 				disk_fmt = '%dp %s %s' % (len(mountpoint) + 1, fmt, fmt)
-				total = [-1] * 1440
-				total[0] = disk_data['total']
-				used = [-1] * 1440
-				used[0] = disk_data['used']
-				f.write(struct.pack(disk_fmt, mountpoint, *(total + used)))
-		else:
-			for field in subfields:
-				array = [-1] * 1440
-				array[0] = data[stat_group][field]
-				f.write(struct.pack(fmt, *array))
+				array = disk_data['total'] + disk_data['used']
+				f.write(struct.pack(disk_fmt, mountpoint, *array))

+ 66 - 13
api/server.py

@@ -1,10 +1,13 @@
 #!/usr/bin/env python
 
+import copy
+import datetime
 import errno
 import json
 import os
 from os import path
 import traceback
+import urlparse
 import warnings
 
 warnings.filterwarnings('ignore', 'libevent')
@@ -29,11 +32,21 @@ def main():
 def application(environ, start_response):
 	try:
 		split = environ['PATH_INFO'][1:].split('/')
+		qs = environ['QUERY_STRING']
+		if qs:
+			query = urlparse.parse_qs(qs, True, True)
+			for k, v in query.items():
+				if len(v) > 1:
+					start_response('400 Bad Request', [('Content-type', 'text/plain')])
+					return ['duplicate query parameter: ' + k]
+				query[k] = v[0]
+		else:
+			query = {}
 		if split[0] == 'v1':
 			handler = handlers.get(split[2])
 			if handler:
-				body = handler(split, environ)
-				start_response('200 OK', [('Content-type', 'text/plain')])
+				body = json.dumps(handler(split, query, environ))
+				start_response('200 OK', [('Content-type', 'application/json')])
 				return [body]
 			else:
 				print 'no handler for', split
@@ -46,28 +59,68 @@ def application(environ, start_response):
 		start_response('500 Internal Server Error', [('Content-type', 'text/plain')])
 		return ['ruh roh']
 
-def get_data(split, environ):
+def get_data(split, query, environ):
 	group = int(split[1])
 	server_id = int(split[3])
-	data_path = path.join(DATA_DIR, str(group), str(server_id))
-	with open(data_path, 'r') as f:
-		stats = fileio.read_stats(f)
-	return json.dumps(stats)
+	start = datetime.datetime.strptime(query['start'], '%Y-%m-%d').date()
+	end = datetime.datetime.strptime(query['end'], '%Y-%m-%d').date()
+
+	server_dir = path.join(DATA_DIR, str(group), str(server_id))
+	rval = {}
+	c = start
+	while c <= end:
+		date_str = c.isoformat()
+		with open(path.join(server_dir, date_str), 'r') as f:
+			stats = fileio.read_stats(f)
+			rval[date_str] = stats
+		c += datetime.timedelta(days=1)
+	return rval
 
-def post_datum(split, environ):
+def post_datum(split, query, environ):
 	group = int(split[1])
 	server_id = int(split[3])
 	body = json.load(environ['wsgi.input'])
 
-	group_dir = path.join(DATA_DIR, str(group))
+	server_dir = path.join(DATA_DIR, str(group), str(server_id))
 	try:
-		os.makedirs(group_dir)
+		os.makedirs(server_dir)
 	except OSError as e:
 		if e.errno != errno.EEXIST:
 			raise
 
-	with open(path.join(group_dir, str(server_id)), 'w') as f:
-		fileio.write_datum(f, body)
-	return '{"status": "ok"}'
+	# we floor to the minute, so this rounds to the nearest minute
+	now = datetime.datetime.utcnow() + datetime.timedelta(seconds=29)
+	data_path = path.join(server_dir, now.date().isoformat())
+	try:
+		with open(data_path, 'r') as f:
+			stats = fileio.read_stats(f)
+	except IOError as e:
+		if e.errno != errno.ENOENT:
+			raise
+		stats = copy.deepcopy(fileio.TEMPLATE)
+
+	index = now.hour * 60 + now.minute
+	data = {}
+	for field, subfields in stats.items():
+		field_data = {}
+		if field == 'disk':
+			disk = stats['disk']
+			for mountpoint, datum in body['disk'].items(): # iterate through body to get new mountpoints
+				disk.setdefault(mountpoint, {'total': [-1] * 1440, 'used': [-1] * 1440})
+				field_data[mountpoint] = {}
+				for subfield, array in disk[mountpoint].items():
+					array = list(array)
+					array[index] = datum[subfield]
+					field_data[mountpoint][subfield] = array
+		else:
+			for subfield, array in subfields.items():
+				array = list(array)
+				array[index] = body[field][subfield]
+				field_data[subfield] = array
+		data[field] = field_data
+
+	with open(data_path, 'w') as f:
+		fileio.write_datum(f, data)
+	return {'status': 'ok'}
 
 main()