data_helpers.py

import gzip
import json
import pickle

import numpy as np


def read_server_results(filename):
    """Load per-transfer server results from a gzipped pickle.

    Each record is expected to hold 'deltas' (lists of 'bytes' and
    'timestamps'), a UTF-8 JSON blob under 'custom_data', and the
    expected total 'data_size'.
    """
    with gzip.GzipFile(filename, 'rb') as f:
        results = pickle.load(f)

    for result in results:
        # Convert the delta lists to numpy arrays for later analysis.
        result['deltas']['bytes'] = np.array(result['deltas']['bytes'])
        result['deltas']['timestamps'] = np.array(result['deltas']['timestamps'])
        result['custom_data'] = json.loads(result['custom_data'].decode('utf-8'))
        # Sanity check: the byte deltas should sum to the full transfer size.
        if np.sum(result['deltas']['bytes']) != result['data_size']:
            print('Note: missing some data')

    return results


def read_server_results_brief(filename):
    """Load the brief (plain JSON) form of the server results."""
    with open(filename, 'r') as f:
        return json.load(f)


def read_remote_cpu_usage(filename):
    """Load CPU-usage samples from a gzipped pickle."""
    with gzip.GzipFile(filename, 'rb') as f:
        return pickle.load(f)


def read_client_info(filename):
    """Load client metadata from a gzipped pickle."""
    with gzip.GzipFile(filename, 'rb') as f:
        return pickle.load(f)


def read_relay_throughput(filename):
    """Parse a relay throughput CSV.

    The file starts with a one-line header, followed by rows of
    comma-separated floats: a timestamp in column 0 and one column per
    thread after that.
    """
    with open(filename, 'r') as f:
        header = f.readline()
        data_unprocessed = [[float(x) for x in line.split(',')] for line in f]
    data_unprocessed = np.array(data_unprocessed)

    data = {}
    data['timestamps'] = data_unprocessed[:, 0]
    data['threads'] = data_unprocessed[:, 1:]

    return (header, data)


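# A sketch of the CSV layout read_relay_throughput appears to expect. The
# column names here are assumptions for illustration; only the positions
# (timestamp first, one value per thread after it) are implied by the code:
#
#   time,thread_0,thread_1
#   0.0,100.0,200.0
#   0.5,50.0,400.0

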
def normalize_relay_throughput(data):
    """Turn per-interval thread totals into rates.

    Each row (except the last, which has no following sample) is divided
    by the length of the interval until the next timestamp, so each value
    becomes a rate (amount per unit time).
    """
    time_deltas = data['timestamps'][1:] - data['timestamps'][:-1]
    normalized_data = {}
    normalized_data['timestamps'] = np.copy(data['timestamps'][:-1])
    # Broadcast each interval's duration across all thread columns.
    normalized_data['threads'] = data['threads'][:-1, :] / time_deltas[:, None]
    return normalized_data


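# A minimal, self-contained usage sketch (not part of the original module):
# the record schema and numbers below are assumptions for illustration, and
# real runs would point the read_* helpers at files produced by the
# measurement scripts.
if __name__ == '__main__':
    import tempfile

    # Round-trip a synthetic server-results record, mirroring the fields
    # read_server_results touches.
    fake_results = [{
        'deltas': {'bytes': [512, 512], 'timestamps': [0.0, 0.5]},
        'custom_data': json.dumps({'note': 'demo'}).encode('utf-8'),
        'data_size': 1024,
    }]
    with tempfile.NamedTemporaryFile(suffix='.pickle.gz', delete=False) as tmp:
        with gzip.GzipFile(fileobj=tmp, mode='wb') as gz:
            pickle.dump(fake_results, gz)
        path = tmp.name
    results = read_server_results(path)
    print(results[0]['deltas']['bytes'], results[0]['custom_data'])

    # Normalize synthetic per-interval thread totals into rates.
    throughput = {
        'timestamps': np.array([0.0, 0.5, 1.0]),
        'threads': np.array([[100.0, 200.0],
                             [50.0, 400.0],
                             [75.0, 300.0]]),
    }
    rates = normalize_relay_throughput(throughput)
    print(rates['timestamps'])  # -> [0.  0.5]
    print(rates['threads'])     # totals divided by the 0.5 time steps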