  1. #!/usr/bin/env python
  2. import warnings
  3. warnings.filterwarnings('ignore', 'libevent')
  4. import gevent.monkey
  5. gevent.monkey.patch_all(thread=False)
  6. import copy
  7. import datetime
  8. import errno
  9. import httplib
  10. import json
  11. import os
  12. from os import path
  13. import traceback
  14. import urlparse
  15. import gevent.pywsgi
  16. import fileio
  17. import reloader
  18. DATA_DIR = path.expanduser('~/sysvitals_data')
  19. handlers = None
  20. def main():
  21. global handlers
  22. handlers = {
  23. 'data': get_data,
  24. 'datum': post_datum,
  25. }
  26. server = gevent.pywsgi.WSGIServer(('0.0.0.0', 8892), application)
  27. reloader.init(server)
  28. server.serve_forever()
  29. class HTTPException(Exception):
  30. def __init__(self, code, body):
  31. self.code = code
  32. self.body = body
  33. def application(environ, start_response):
  34. try:
  35. split = environ['PATH_INFO'][1:].split('/')
  36. qs = environ['QUERY_STRING']
  37. if qs:
  38. query = urlparse.parse_qs(qs, True, True)
  39. for k, v in query.items():
  40. if len(v) > 1:
  41. raise HTTPException(400, 'duplicate query parameter: ' + k)
  42. query[k] = v[0]
  43. else:
  44. query = {}
  45. if split[0] == 'v1':
  46. handler = handlers.get(split[2])
  47. if handler:
  48. body = json.dumps(handler(split, query, environ))
  49. start_response('200 OK', [('Content-type', 'application/json')])
  50. return [body]
  51. else:
  52. print 'no handler for', split
  53. else:
  54. print 'split was', split
  55. raise HTTPException(404, 'unhandled path: ' + environ['PATH_INFO'])
  56. except HTTPException as e:
  57. response = '%d %s' % (e.code, httplib.responses[e.code])
  58. start_response(response, [('Content-type', 'text/plain')])
  59. return [e.body]
  60. except:
  61. traceback.print_exc()
  62. start_response('500 Internal Server Error', [('Content-type', 'text/plain')])
  63. return ['ruh roh']
  64. def get_data(split, query, environ):
  65. try:
  66. group = int(split[1])
  67. server_id = int(split[3])
  68. start = datetime.datetime.strptime(query['start'], '%Y-%m-%d').date()
  69. end = datetime.datetime.strptime(query['end'], '%Y-%m-%d').date()
  70. except (IndexError, KeyError, ValueError):
  71. raise HTTPException(400, '')
  72. server_dir = path.join(DATA_DIR, str(group), str(server_id))
  73. rval = {}
  74. c = start
  75. while c <= end:
  76. date_str = c.isoformat()
  77. with open(path.join(server_dir, date_str), 'r') as f:
  78. stats = fileio.read_stats(f)
  79. rval[date_str] = stats
  80. c += datetime.timedelta(days=1)
  81. return rval
  82. def post_datum(split, query, environ):
  83. try:
  84. group = int(split[1])
  85. server_id = int(split[3])
  86. except (IndexError, ValueError):
  87. raise HTTPException(400, '')
  88. try:
  89. body = json.load(environ['wsgi.input'])
  90. except ValueError:
  91. raise HTTPException(400, 'post body was not valid JSON')
  92. if not isinstance(body, dict):
  93. raise HTTPException(400, 'post body was not a JSON dictionary')
  94. if body.keys() != fileio.TEMPLATE.keys():
  95. diff = set(body.keys()).symmetric_difference(set(fileio.TEMPLATE.keys()))
  96. raise HTTPException(400, 'post body had missing or extra keys: ' + ','.join(diff))
  97. server_dir = path.join(DATA_DIR, str(group), str(server_id))
  98. try:
  99. os.makedirs(server_dir)
  100. except OSError as e:
  101. if e.errno != errno.EEXIST:
  102. raise
  103. # we floor to the minute, so this rounds to the nearest minute
  104. now = datetime.datetime.utcnow() + datetime.timedelta(seconds=29)
  105. data_path = path.join(server_dir, now.date().isoformat())
  106. try:
  107. with open(data_path, 'r') as f:
  108. stats = fileio.read_stats(f)
  109. except IOError as e:
  110. if e.errno != errno.ENOENT:
  111. raise
  112. stats = copy.deepcopy(fileio.TEMPLATE)
  113. index = now.hour * 60 + now.minute
  114. data = {}
  115. for field, subfields in stats.items():
  116. field_data = {}
  117. if field == 'disk':
  118. disk = stats['disk']
  119. for mountpoint, datum in body['disk'].items(): # iterate through body to get new mountpoints
  120. disk.setdefault(mountpoint, {'total': [-1] * 1440, 'used': [-1] * 1440})
  121. field_data[mountpoint] = {}
  122. for subfield, array in disk[mountpoint].items():
  123. array = list(array)
  124. array[index] = datum[subfield]
  125. field_data[mountpoint][subfield] = array
  126. else:
  127. for subfield, array in subfields.items():
  128. array = list(array)
  129. array[index] = body[field][subfield]
  130. field_data[subfield] = array
  131. data[field] = field_data
  132. with open(data_path, 'w') as f:
  133. fileio.write_datum(f, data)
  134. return {'status': 'ok'}
  135. main()