# data_helpers.py
  1. import numpy as np
  2. import gzip
  3. import pickle
  4. import json
  5. #
  6. def read_server_results(filename):
  7. with gzip.GzipFile(filename, 'rb') as f:
  8. results = pickle.load(f)
  9. for x in range(len(results)):
  10. results[x]['deltas']['bytes'] = np.array(results[x]['deltas']['bytes'])
  11. results[x]['deltas']['timestamps'] = np.array(results[x]['deltas']['timestamps'])
  12. results[x]['custom_data'] = json.loads(results[x]['custom_data'].decode('utf-8'))
  13. if np.sum(results[x]['deltas']['bytes']) != results[x]['data_size']:
  14. print('Note: missing some data')
  15. #
  16. #
  17. return results
  18. #
  19. #
  20. def read_client_info(filename):
  21. with gzip.GzipFile(filename, 'rb') as f:
  22. return pickle.load(f)
  23. #
  24. #
  25. def read_relay_throughput(filename):
  26. with open(filename, 'r') as f:
  27. header = f.readline()
  28. data_unprocessed = [[float(x) for x in line.split(',')] for line in f.readlines()]
  29. data_unprocessed = np.array(data_unprocessed)
  30. #
  31. data = {}
  32. data['timestamps'] = data_unprocessed[:,0]
  33. data['threads'] = data_unprocessed[:,1:]
  34. #
  35. return (header, data)
  36. #
  37. def normalize_relay_throughput(data):
  38. time_deltas = data['timestamps'][1:]-data['timestamps'][:-1]
  39. normalized_data = {}
  40. normalized_data['timestamps'] = np.copy(data['timestamps'][:-1])
  41. normalized_data['threads'] = data['threads'][:-1,:]/(time_deltas[:,None])
  42. return normalized_data
  43. #