# server.py
  1. #!/usr/bin/env python
  2. import warnings
  3. warnings.filterwarnings('ignore', 'libevent')
  4. import gevent.monkey
  5. gevent.monkey.patch_all(thread=False)
  6. import copy
  7. import datetime
  8. import errno
  9. import httplib
  10. import json
  11. import os
  12. from os import path
  13. import traceback
  14. import urlparse
  15. import gevent.pywsgi
  16. import fileio
  17. import reloader
  18. DATA_DIR = path.expanduser('~/sysvitals_data')
  19. handlers = None
  20. def main():
  21. global handlers
  22. handlers = {
  23. 'data': get_data,
  24. 'datum': post_datum,
  25. }
  26. server = gevent.pywsgi.WSGIServer(('0.0.0.0', 8892), application)
  27. reloader.init(server)
  28. server.serve_forever()
  29. class HTTPException(Exception):
  30. def __init__(self, code, body):
  31. self.code = code
  32. self.body = body
  33. def application(environ, start_response):
  34. try:
  35. split = environ['PATH_INFO'][1:].split('/')
  36. qs = environ['QUERY_STRING']
  37. if qs:
  38. query = urlparse.parse_qs(qs, True, True)
  39. for k, v in query.items():
  40. if len(v) > 1:
  41. raise HTTPException(400, 'duplicate query parameter: ' + k)
  42. query[k] = v[0]
  43. else:
  44. query = {}
  45. if split[0] == 'v1':
  46. handler = handlers.get(split[2])
  47. if handler:
  48. body = json.dumps(handler(split, query, environ))
  49. start_response('200 OK', [('Content-type', 'application/json')])
  50. return [body]
  51. else:
  52. print 'no handler for', split
  53. else:
  54. print 'split was', split
  55. raise HTTPException(404, 'unhandled path: ' + environ['PATH_INFO'])
  56. except HTTPException as e:
  57. response = '%d %s' % (e.code, httplib.responses[e.code])
  58. start_response(response, [('Content-type', 'text/plain')])
  59. return [e.body]
  60. except:
  61. traceback.print_exc()
  62. start_response('500 Internal Server Error', [('Content-type', 'text/plain')])
  63. return ['ruh roh']
  64. def get_data(split, query, environ):
  65. try:
  66. group = int(split[1])
  67. server_id = int(split[3])
  68. start = datetime.datetime.strptime(query['start'], '%Y-%m-%d').date()
  69. end = datetime.datetime.strptime(query['end'], '%Y-%m-%d').date()
  70. except (IndexError, KeyError, ValueError):
  71. raise HTTPException(400, '')
  72. server_dir = path.join(DATA_DIR, str(group), str(server_id))
  73. rval = {}
  74. c = start
  75. while c <= end:
  76. date_str = c.isoformat()
  77. try:
  78. with open(path.join(server_dir, date_str), 'r') as f:
  79. stats = fileio.read_stats(f)
  80. except IOError as e:
  81. if e.errno == errno.ENOENT:
  82. stats = None
  83. else:
  84. raise
  85. rval[date_str] = stats
  86. c += datetime.timedelta(days=1)
  87. return rval
  88. def post_datum(split, query, environ):
  89. try:
  90. group = int(split[1])
  91. server_id = int(split[3])
  92. except (IndexError, ValueError):
  93. raise HTTPException(400, '')
  94. try:
  95. body = json.load(environ['wsgi.input'])
  96. except ValueError:
  97. raise HTTPException(400, 'post body was not valid JSON')
  98. if not isinstance(body, dict):
  99. raise HTTPException(400, 'post body was not a JSON dictionary')
  100. if body.keys() != fileio.TEMPLATE.keys():
  101. diff = set(body.keys()).symmetric_difference(set(fileio.TEMPLATE.keys()))
  102. raise HTTPException(400, 'post body had missing or extra keys: ' + ','.join(diff))
  103. server_dir = path.join(DATA_DIR, str(group), str(server_id))
  104. try:
  105. os.makedirs(server_dir)
  106. except OSError as e:
  107. if e.errno != errno.EEXIST:
  108. raise
  109. # we floor to the minute, so this rounds to the nearest minute
  110. now = datetime.datetime.utcnow() + datetime.timedelta(seconds=29)
  111. data_path = path.join(server_dir, now.date().isoformat())
  112. try:
  113. with open(data_path, 'r') as f:
  114. stats = fileio.read_stats(f)
  115. except IOError as e:
  116. if e.errno != errno.ENOENT:
  117. raise
  118. stats = copy.deepcopy(fileio.TEMPLATE)
  119. index = now.hour * 60 + now.minute
  120. data = {}
  121. for field, subfields in stats.items():
  122. field_data = {}
  123. if field == 'disk':
  124. disk = stats['disk']
  125. for mountpoint, datum in body['disk'].items(): # iterate through body to get new mountpoints
  126. disk.setdefault(mountpoint, {'total': [-1] * 1440, 'used': [-1] * 1440})
  127. field_data[mountpoint] = {}
  128. for subfield, array in disk[mountpoint].items():
  129. array = list(array)
  130. array[index] = datum[subfield]
  131. field_data[mountpoint][subfield] = array
  132. else:
  133. for subfield, array in subfields.items():
  134. array = list(array)
  135. array[index] = body[field][subfield]
  136. field_data[subfield] = array
  137. data[field] = field_data
  138. with open(data_path, 'w') as f:
  139. fileio.write_datum(f, data)
  140. return {'status': 'ok'}
  141. main()