import csv
import sys

import numpy as np
import pandas as pd
def main():
    """Summarize per-protocol performance stats at the 900-bridge data point.

    Reads one results CSV per protocol, extracts the request/response size
    and timing columns (plus their standard deviations) for the row where
    ``Bridges == 900``, and writes one summary row per protocol to
    ``performance_stats.csv``.

    Returns:
        None. Output is the CSV file plus a completion message on stdout.
    """
    protocols = ["Open Invitation", "Trust Promotion(0->1)",
                 "Trust Migration (0->1)", "Level Up (1->4)", "Issue Invitation",
                 "Redeem Invitation", "Check Blockage 5%", "Check Blockage 50%",
                 "Check Blockage 100%", "Blockage Migration"]
    # One input file per protocol, in the same order as `protocols`.
    files = ["updated_levels.csv", "trust_promo.csv", "trust_mig.csv", "level2.csv",
             "issue_invites.csv", "redeem_invites.csv", "checkblockage5.csv",
             "checkblockage50.csv", "checkblockage100.csv", "updated_blockage50.csv"]
    # Metrics copied into the summary, in output-column order.
    csv_cols = ["RequestS", "RequestT", "Rtstdev", "ResponseS", "ResponseT",
                "ReTstdev", "ResponseHT", "RHTstdev"]
    perf_columns = ["Protocol", "Request Size", "Request Time", "sigma",
                    "Response Size", "Response Time", "sigma",
                    "Response Handling Time", "sigma"]
    # Full column set expected in every source CSV (only a subset is summarized).
    columns = ["Percent", "Bridges", "RequestS", "Rsstdev", "RequestT",
               "Rtstdev", "ResponseS", "Restdev", "ResponseT",
               "ReTstdev", "ResponseHT", "RHTstdev"]
    # `with` guarantees the output file is closed even if a read below raises.
    with open("performance_stats.csv", "w", newline='') as perf:
        perfwriter = csv.writer(perf, delimiter=',')
        perfwriter.writerow(perf_columns)
        for protocol, fname in zip(protocols, files):
            df = pd.read_csv(fname, usecols=columns)
            perf_in = [protocol]
            for item in csv_cols:
                # Value of this metric at the Bridges == 900 row.
                row = df[item].loc[df['Bridges'] == 900].values
                # Standard deviations get 1 decimal, raw metrics get 3.
                if "stdev" in item:
                    rounded = np.round(row[0], decimals=1)
                else:
                    rounded = np.round(row[0], decimals=3)
                perf_in.append(rounded)
            perfwriter.writerow(perf_in)
    print("\nDone Tables.\nTable data output to: performance_stats.csv,\n")
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    raise SystemExit(main())
|