import numpy as np
import gzip
import pickle
import json

def read_server_results(filename):
    """Load pickled per-transfer server results from a gzip file and post-process them.

    Each record's ``deltas['bytes']`` and ``deltas['timestamps']`` lists are
    converted to numpy arrays, and ``custom_data`` (UTF-8 JSON bytes) is decoded
    into a Python object. Records are mutated in place.

    Parameters:
        filename: path to a gzip-compressed pickle containing a list of dicts.

    Returns:
        The list of post-processed result dicts.
    """
    # NOTE(review): pickle.load is unsafe on untrusted input — only use with
    # files produced by a trusted measurement run.
    with gzip.GzipFile(filename, 'rb') as f:
        results = pickle.load(f)

    # Iterate records directly instead of indexing by range(len(...));
    # each record is a dict, so in-place mutation is preserved.
    for result in results:
        result['deltas']['bytes'] = np.array(result['deltas']['bytes'])
        result['deltas']['timestamps'] = np.array(result['deltas']['timestamps'])
        # custom_data arrives as UTF-8 encoded JSON bytes.
        result['custom_data'] = json.loads(result['custom_data'].decode('utf-8'))
        # Sanity check: the byte deltas should account for the full transfer.
        if np.sum(result['deltas']['bytes']) != result['data_size']:
            print('Note: missing some data')

    return results

def read_server_results_brief(filename):
    """Load the brief (plain-JSON) summary of server results from *filename*."""
    with open(filename, 'r') as handle:
        raw = handle.read()
    return json.loads(raw)

def read_remote_cpu_usage(filename):
    """Unpickle remote CPU-usage data from a gzip-compressed file."""
    # NOTE(review): assumes the file was produced by a trusted run (pickle).
    with gzip.open(filename, 'rb') as handle:
        return pickle.load(handle)

def read_client_info(filename):
    """Unpickle client-side metadata from a gzip-compressed file."""
    # NOTE(review): assumes the file was produced by a trusted run (pickle).
    with gzip.open(filename, 'rb') as handle:
        info = pickle.load(handle)
    return info

def read_relay_throughput(filename):
    """Parse a relay-throughput CSV file.

    The first line is treated as a header (returned verbatim, newline included);
    every following non-blank line is a comma-separated row of floats where
    column 0 is a timestamp and the remaining columns are per-thread counters.

    Parameters:
        filename: path to the throughput CSV file.

    Returns:
        ``(header, data)`` where ``data`` is a dict with:
          - ``'timestamps'``: 1-D array of column 0,
          - ``'threads'``: 2-D array of the remaining columns.
    """
    with open(filename, 'r') as f:
        header = f.readline()
        # Iterate the file directly (no readlines()); skip blank lines so a
        # trailing newline at EOF doesn't raise on float('').
        rows = [[float(value) for value in line.split(',')]
                for line in f if line.strip()]

    matrix = np.array(rows)
    data = {
        'timestamps': matrix[:, 0],
        'threads': matrix[:, 1:],
    }
    return (header, data)

def normalize_relay_throughput(data):
    """Convert per-interval throughput samples into per-unit-time rates.

    Each thread's value at sample i is divided by the time elapsed until
    sample i+1; the final sample has no following interval and is dropped.

    Parameters:
        data: dict with 'timestamps' (1-D array) and 'threads' (2-D array).

    Returns:
        A new dict with the same keys, one sample shorter than the input.
    """
    # np.diff(a) is exactly a[1:] - a[:-1].
    intervals = np.diff(data['timestamps'])
    return {
        'timestamps': np.copy(data['timestamps'][:-1]),
        'threads': data['threads'][:-1, :] / intervals[:, None],
    }
