
Additional refinements and deletions

Lindsey Tulloch · 1 year ago
commit ded48c7366

+ 3 - 0
Dockerfile

@@ -8,5 +8,8 @@ ADD Parsing-results ./Parsing-results
 ADD README.md README.md
 RUN apt-get update -y
 RUN apt-get install -y python3 python3-pip
+RUN pip3 install pandas 
+RUN pip3 install numpy
+RUN pip3 install matplotlib
 RUN cargo build --release
 ENV SHELL=/bin/bash
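
Note: the three new pip3 layers each install a single package; a minimal alternative sketch, assuming pandas, numpy, and matplotlib are the only Python dependencies the parsing scripts need, is to collapse them into one layer (or install from the requirements.txt that parse_data.sh previously used):

    RUN pip3 install pandas numpy matplotlib

Either form installs the same packages; the single layer just keeps the image slightly smaller.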

+ 0 - 61
Parsing-results/check_blockages_server.py

@@ -1,61 +0,0 @@
-import sys
-import pandas as pd
-import matplotlib
-import matplotlib.pyplot as plt
-from matplotlib.lines import Line2D
-
-def main():
-    set_plot_options()
-    fig, axs = plt.subplots(1, 2, figsize=(24, 7))
-    columns = ["Percent", "RequestT", "Rtstdev", "ResponseS", "ResponseT",
-               "ReTstdev"]
-    df = pd.read_csv("check_blockage.csv", usecols=columns)
-    fig.supxlabel('Blocked Bridges (%)')
-    axs[0].set_ylabel('Response Time (ms)')
-    axs[0].plot(df.Percent, df.ResponseT, color='#CC4F1B',
-                label='Response Time for Percentage of Bridges Blocked')
-    axs[0].fill_between(df.Percent, df.ResponseT-df.ReTstdev,
-                        df.ResponseT+df.ReTstdev, alpha=0.5, edgecolor='#CC4F1B',
-                        facecolor='#FF9848')
-    axs[1].set_ylabel('Response Size (ms)')
-    axs[1].plot(df.Percent, df.ResponseS, color='#CC4F1B',
-                label='Response Size for Percentage of Bridges Blocked')
-    fig.savefig("Performance2.pdf")
-
-
-def set_plot_options():
-      options = {
-          'font.size': 12,
-          'figure.figsize': (4,2),
-          'figure.dpi': 100.0,
-          'figure.subplot.left': 0.20,
-          'figure.subplot.right': 0.97,
-          'figure.subplot.bottom': 0.20,
-          'figure.subplot.top': 0.90,
-          'grid.color': '0.1',
-          'grid.linestyle': ':',
-          #'grid.linewidth': 0.5,
-          'axes.grid' : True,
-          #'axes.grid.axis' : 'y',
-          #'axes.axisbelow': True,
-          'axes.titlesize' : 'x-large',
-          'axes.labelsize' : 'x-large',
-          'axes.formatter.limits': (-4,4),
-          'xtick.labelsize' : 20,#get_tick_font_size_10(),
-          'ytick.labelsize' : 20,#get_tick_font_size_10(),
-          'lines.linewidth' : 2.0,
-          'lines.markeredgewidth' : 0.5,
-          'lines.markersize' : 10,
-      }
-
-      for option_key in options:
-          matplotlib.rcParams[option_key] = options[option_key]
-      if 'figure.max_num_figures' in matplotlib.rcParams:
-          matplotlib.rcParams['figure.max_num_figures'] = 100
-      if 'figure.max_open_warning' in matplotlib.rcParams:
-          matplotlib.rcParams['figure.max_open_warning'] = 100
-      if 'legend.ncol' in matplotlib.rcParams:
-         matplotlib.rcParams['legend.ncol'] = 100
-
-if __name__ == "__main__":
-    sys.exit(main())

+ 0 - 152
Parsing-results/make_invited_plot.py

@@ -1,152 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import json
-import lzma
-import datetime
-import numpy as np
-
-import matplotlib
-import matplotlib.pyplot as pyplot
-import matplotlib.colors as mcolors
-
-
-def main():
-    set_plot_options()
-    days=np.arange(180)
-    ft = []
-
-    # initialize users to 20
-    users = 20
-    total_users = users
-    g = 0
-    h = 0
-    i = 0
-    g_two = 0
-    h_two = 0
-    i_two = 0
-    g_two_two = 0
-    h_two_two = 0
-    i_two_two = 0
-    g_two_four = 0
-    h_two_four = 0
-    i_two_four = 0
-    g_four = 0
-    g_four_two = 0
-    h_four = 0
-    i_four = 0
-    g_six = 0
-    h_six = 0
-    i_six = 0
-    g_users = 0
-    h_users = 0
-    i_users = 0
-    g_two_users = 0
-    h_two_users = 0
-
-    pyplot.figure()
-    for f in days:
-        if g > 1:
-            g = g+1
-        if h > 1:
-            h = h+1
-        if g_two > 1:
-            g_two = g_two+1
-        if h_two > 1:
-            h_two = h_two+1
-        if g_two_two > 1:
-            g_two_two = g_two_two+1
-        if h_two_two > 1:
-            h_two_two = h_two_two+1
-        if g_two_four > 1:
-            g_two_four = g_two_four+1
-        if h_two_four > 1:
-            h_two_four = h_two_four+1
-        if g_four > 1:
-            g_four = g_four+1
-        if g_four_two > 1:
-            g_four_two = g_four_two+1
-        if f == 72:
-            g = 44
-            g_users = users*2
-            total_users = total_users+g_users
-        if f == 128:
-            h= 44
-            h_users = users*4
-            total_users = total_users+h_users
-        if g == 72:
-            g_two_users = g_users * 2
-            total_users = total_users+g_two_users
-            g_two = 44
-        if g_two == 72:
-            g_two_two = 44
-            g_two_two_users = g_two_users*2
-            total_users = total_users+g_two_two_users
-        if g_two_two == 72:
-            g_six_users = g_two_two_users*2
-            total_users = total_users+g_six_users
-        if h == 72:
-            h_two_users = h_users * 2
-            total_users = total_users+h_two_users
-            h_two =44
-        if g == 128:
-            g_four = 44
-            g_four_users = g_users* 4
-            total_users = total_users+g_four_users
-        if h_two == 72:
-            h_two_two = 44
-            h_two_two_users = h_two_users*2
-            total_users = total_users+h_two_two_users
-        if g_four == 72:
-            g_four_two = 44
-            g_four_two_users = g_four_users*2
-            total_users = total_users+g_four_two_users
-        x = total_users
-        ft.append(x)
-
-    pyplot.plot(days, ft, label='Invited Users')
-
-    pyplot.ylabel("Users")
-    pyplot.xlabel("Number of Days")
-#    pyplot.title("Average Number of Bridge Users for 1 Month Old Bridges November 2021")
-
-
-#    pyplot.legend(title="Protocols", loc="upper left")
-    pyplot.tight_layout(pad=1)
-    pyplot.savefig("invited-users.pdf")
-
-def set_plot_options():
-    options = {
-        #'backend': 'PDF',
-        'font.size': 12,
-        'figure.figsize': (7,5),
-        'figure.dpi': 100.0,
-        'axes.grid' : True,
-        'axes.xmargin' : 0.01,
-        'axes.grid.axis' : 'y',
-        'axes.axisbelow': True,
-        'axes.titlesize' : 'medium',
-        'axes.labelsize' : 'large',
-        'axes.formatter.limits': (-6,6),
-        'xtick.labelsize' : 10,#get_tick_font_size_10(),
-        'ytick.labelsize' : 10,
-        'lines.linewidth' : 2.0,
-        'lines.markersize' : 10,
-        # turn on the following to embedd fonts; requires latex
-        'ps.useafm' : True,
-        'pdf.use14corefonts' : True,
-        'text.usetex' : False,
-    }
-
-    for option_key in options:
-        matplotlib.rcParams[option_key] = options[option_key]
-
-    if 'figure.max_num_figures' in matplotlib.rcParams:
-        matplotlib.rcParams['figure.max_num_figures'] = 100
-    if 'figure.max_open_warning' in matplotlib.rcParams:
-        matplotlib.rcParams['figure.max_open_warning'] = 100
-    if 'legend.ncol' in matplotlib.rcParams:
-        matplotlib.rcParams['legend.ncol'] = 100
-
-if __name__ == "__main__":
-    sys.exit(main())

+ 4 - 38
Parsing-results/make_tables.py

@@ -6,31 +6,20 @@ import numpy as np
 
 def main():
     perf = open("performance_stats"+".csv", "w", newline='')
-#    latency = open("latency.csv", "w", newline='')
-#    load = open("load.csv", "w", newline='')
-#    bursty = open("burst.csv", "w",newline='')
     protocols=["Open Invitation", "Trust Promotion(0->1)",
                "Trust Migration (0->1)", "Level Up (1->4)", "Issue Invitation",
                     "Redeem Invitation", "Check Blockage 5%", "Check Blockage 50%", "Check Blockage 100%", "Blockage Migration"]
-    files = ["updated_levels.csv", "trust_promo.csv", "trust_mig.csv", "level2.csv",
-             "issue_invites.csv", "redeem_invites.csv","checkblockage5.csv",
-             "checkblockage50.csv","checkblockage100.csv","updated_blockage50.csv"]
+    files = ["trust_levels.csv", "trust_promo.csv", "trust_mig.csv", "level2.csv",
+             "issue_invites.csv", "redeem_invites.csv","check_blockage5.csv",
+             "check_blockage50.csv","check_blockage100.csv","check_blockage50.csv"]
     csv_cols = ["RequestS", "RequestT","Rtstdev","ResponseS","ResponseT",
                 "ReTstdev", "ResponseHT", "RHTstdev"]
     perf_columns = ["Protocol","Request Size", "Request Time", "sigma",
                     "Response Size","Response Time", "sigma",
                     "Response Handling Time", "sigma"]
-#    lat_columns = ["Protocol", "Client Side Latency (ms)", "Users/s Response",
-#                   "ms/Response"]
-#    load_columns = ["Protocol", "Users/core", "User/day Requests",
-#                   "Users/week Requests", "Users/month Requests"]
     worst_resp = 0
     perfwriter = csv.writer(perf, delimiter=',')
     perfwriter.writerow(perf_columns)
-#    latwriter = csv.writer(latency, delimiter=',')
-#    latwriter.writerow(lat_columns)
-#    loadwriter = csv.writer(load, delimiter=',')
-#    loadwriter.writerow(load_columns)
 
     for i, protocol in enumerate(protocols):
         columns = ["Percent","Bridges", "RequestS", "Rsstdev", "RequestT",
@@ -38,11 +27,8 @@ def main():
                    "ReTstdev", "ResponseHT", "RHTstdev"]
         df = pd.read_csv(files[i], usecols=columns)
         perf_in = []
- #       lat_in = []
- #       req = 0.0
 
         perf_in.append(protocol)
- #       lat_in.append(protocol)
         for item in csv_cols:
             row = df[item].loc[df['Bridges']==900].values
             if "stdev" in item:
@@ -53,49 +39,29 @@ def main():
             rounded = np.round(row[0], decimals=1)
             if item == "RequestT":
                 req = np.round(rounded, decimals=1)
- #               lat_in.append(req)
- #           elif item == "ResponseHT":
- #               lat_in[1]=np.round(req+rounded, decimals=1)
             elif item == "ResponseT":
                 resp_sec = np.round(1000/rounded, decimals=1)
                 resp_core = resp_sec/(1/(60*60*24))
                 if rounded > worst_resp:
                     worst_resp = rounded
 
-#                lat_in.append(resp_sec)
-#                lat_in.append(rounded)
-
 
         perfwriter.writerow(perf_in)
-#        latwriter.writerow(lat_in)
 
     for i, protocol in enumerate(protocols):
         columns = ["Percent","Bridges", "RequestS", "Rsstdev", "RequestT",
                    "Rtstdev", "ResponseS","Restdev","ResponseT",
                    "ReTstdev", "ResponseHT", "RHTstdev"]
         df = pd.read_csv(files[i], usecols=columns)
-#        load_in = []
-#        load_in.append(protocol)
         row = df['ResponseT'].loc[df['Bridges']==900].values
         rounded = np.round(row[0], decimals=3)
         resp_sec = np.round(1000/rounded, decimals=3)
         resp_core = int(resp_sec/(1/(60*60*24)))
-#        load_in.append(resp_core)
         if worst_resp > rounded:
             secs = int(worst_resp/1000)
-#            load_in.append(resp_core*secs)
-#            load_in.append(resp_core*7*secs)
-#            load_in.append(resp_core*30*secs)
- #       else:
- #           load_in.append(resp_core)
- #           load_in.append(resp_core*7)
- #           load_in.append(resp_core*30)
- #       loadwriter.writerow(load_in)
 
     perf.close()
-#    latency.close()
-#    load.close()
-    print("\nDone Tables.\nTable data output to: performance_stats.csv,\n")# latency.csv, load.csv\n")
+    print("\nDone Tables.\nTable data output to: performance_stats.csv,\n")
 
 
 if __name__ == "__main__":

+ 3 - 6
Parsing-results/parse_data.sh

@@ -1,17 +1,14 @@
 #!/bin/bash
 
 # Parse results from Lox stat tests
-echo 'Install python dependencies'
-
-pip install -r requirements.txt
-
 echo 'Parse raw output to csv'
 
 python3 raw_to_csv.py
 
 echo 'Make plots for data'
 
-python3 check_blockages.py
-python3 trust_promo_plot.py
+#python3 check_blockages.py
+python3 pets_plots.py
+#python3 trust_promo_plot.py
 python3 make_tables.py
 python3 make_steady.py

+ 0 - 1
Parsing-results/performance_stats.csv

@@ -1 +0,0 @@
-Protocol,Request Size,Request Time,sigma,Response Size,Response Time,sigma,Response Handling Time,sigma

+ 1 - 1
Parsing-results/pets_plots.py

@@ -23,7 +23,7 @@ def main():
                 df.ResponseT+df.ReTstdev, alpha=0.5, edgecolor='#CC4F1B',
                 facecolor='#FF9848')
     plt.tight_layout(pad=1)
-    plt.savefig("../../PETS22/figures/StandardCheck.pdf")
+    plt.savefig("StandardCheck.pdf")
     plt.close('all')
 
 def set_plot_options():

+ 3 - 5
Parsing-results/raw_to_csv.py

@@ -9,7 +9,7 @@ for p in Path('.').glob('*.log'):
         test_file = open(p.name.strip('.log')+".csv", "w")
         test_file.write("Percent,Bridges,RequestS,Rsstdev,RequestT,Rtstdev,ResponseS,Restdev,ResponseT,ReTstdev,ResponseHT,RHTstdev\n")
         bridges = 0
-        c=0
+        c=False
         red = 0
         check = 0
         level = 0
@@ -37,9 +37,9 @@ for p in Path('.').glob('*.log'):
                 if int(bridges) == 900:
                     check =1
                 if not c:
-                    check_b = open("checkblockage"+str(num)+".csv", "w")
+                    check_b = open("check_blockage"+str(num)+".csv", "w")
                     check_b.write("Percent,Bridges,RequestS,Rsstdev,RequestT,Rtstdev,ResponseS,Restdev,ResponseT,ReTstdev,ResponseHT,RHTstdev\n")
-                    c=1
+                    c=True
             elif "BLOCKAGE-MIGRATION" in line:
                 protocol = 2
                 num = line.split("-")[6].strip('-')
@@ -169,8 +169,6 @@ for p in Path('.').glob('*.log'):
             level_file_f.close()
             trust_promo.close()
             mig_file.close()
-        if check_b:
-            check_b.close()
         if red:
             redeem.close()
         test_file.close()

+ 0 - 1
Parsing-results/standard_check.csv

@@ -1 +0,0 @@
-Percent,Bridges,RequestS,Rsstdev,RequestT,Rtstdev,ResponseS,Restdev,ResponseT,ReTstdev,ResponseHT,RHTstdev

+ 1 - 1
Parsing-results/trust_promo.py

@@ -1,4 +1,4 @@
-log_file = open("updated_levels.log", "r").readlines()
+log_file = open("trust_levels.log", "r").readlines()
 trust_promo_file = open("trust_promo"+".csv", "w")
 trust_promo_file.write("Bridges,RequestS,Rsstdev,RequestT,Rtstdev,ResponseS,Restdev,ResponseT,ReTstdev,ResponseHT,RHTstdev\n")
 

+ 19 - 19
run_tests_fast.sh

@@ -2,25 +2,25 @@
 
 cargo test --release --features=fast -- --nocapture stats_test_trust_levels > trust_levels.log
 cargo test --release --features=fast -- --nocapture stats_test_invitations > invitations.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_05 > blockage_migration_05.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_010 > blockage_migration_010.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_20 > blockage_migration_20.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_25 > blockage_migration_25.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_30 > blockage_migration_30.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_35 > blockage_migration_35.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_40 > blockage_migration_40.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_45 > blockage_migration_45.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_50 > blockage_migration_50.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_55 > blockage_migration_55.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_60 > blockage_migration_60.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_65 > blockage_migration_65.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_70 > blockage_migration_70.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_75 > blockage_migration_75.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_80 > blockage_migration_80.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_85 > blockage_migration_85.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_90 > blockage_migration_90.log
-cargo test --release --features=fast -- --nocapture stats_test_percent_blockage_migration_95 > blockage_migration_95.log
-cargo test release --features=fast -- --nocapture stats_test_percent_blockage_migration_100 > blockage_migration_100.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_05 > check_blockage05.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_010 > check_blockage010.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_20 > check_blockage20.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_25 > check_blockage25.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_30 > check_blockage30.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_35 > check_blockage35.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_40 > check_blockage40.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_45 > check_blockage45.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_50 > check_blockage50.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_55 > check_blockage55.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_60 > check_blockage60.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_65 > check_blockage65.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_70 > check_blockage70.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_75 > check_blockage75.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_80 > check_blockage80.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_85 > check_blockage85.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_90 > check_blockage90.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_95 > check_blockage95.log
+cargo test --release --features=fast -- --nocapture stats_test_percent_check_blockage_100 > check_blockage100.log
 echo "Completed all tests, now parsing results"
 mv *.log Parsing-results
 cd Parsing-results
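
Note: the check-blockage invocations above all follow one pattern, so the same list could be generated in a loop; a minimal bash sketch, using the percentage suffixes exactly as they appear above (including the 05 and 010 spellings):

    for pct in 05 010 20 25 30 35 40 45 50 55 60 65 70 75 80 85 90 95 100; do
        # each run filters to one stats test and captures its raw output for the parsers
        cargo test --release --features=fast -- --nocapture \
            "stats_test_percent_check_blockage_${pct}" > "check_blockage${pct}.log"
    done

The explicit per-test lines and the loop are equivalent; the loop just makes it harder for a single invocation to drift out of step.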

+ 0 - 71
src/migration.txt

@@ -1,71 +0,0 @@
-
-running 2 tests
-test tests::stats_test_blockage_migration_40 has been running for over 60 seconds
-test tests::stats_test_blockage_migration_45 has been running for over 60 seconds
-
----------------Check Blockage 40---------------
-
-Average Request size = 744
-Request Standard Deviation = 0
-Average Request Time = 17.560349ms
-Request time Standard Deviation = 527.939µs
-Average Response size = 1364
-Response Standard Deviation = 0
-Average Response Time = 29.160633ms
-Response Time Standard Deviation = 957.851µs
-Average Response Handling Time = 1.632675ms
-Response Handling Time Standard Deviation = 80.419µs
-
----------------Blockage Migration 40---------------
-
-Average Request size = 1224
-Request Standard Deviation = 0
-Average Request Time = 31.246734ms
-Request time Standard Deviation = 863.903µs
-Average Response size = 840
-Response Standard Deviation = 0
-Average Response Time = 47.96312ms
-Response Time Standard Deviation = 1.132074ms
-Average Response Handling Time = 20.067194ms
-Response Handling Time Standard Deviation = 560.91µs
-test tests::stats_test_blockage_migration_40 ... ok
-
----------------Check Blockage 45---------------
-
-Average Request size = 744
-Request Standard Deviation = 0
-Average Request Time = 16.879235ms
-Request time Standard Deviation = 1.012876ms
-Average Response size = 2204
-Response Standard Deviation = 0
-Average Response Time = 33.453128ms
-Response Time Standard Deviation = 2.007504ms
-Average Response Handling Time = 1.873567ms
-Response Handling Time Standard Deviation = 134.745µs
-
----------------Blockage Migration 45---------------
-
-Average Request size = 1224
-Request Standard Deviation = 0
-Average Request Time = 29.964257ms
-Request time Standard Deviation = 1.732654ms
-Average Response size = 840
-Response Standard Deviation = 0
-Average Response Time = 46.019155ms
-Response Time Standard Deviation = 2.632306ms
-Average Response Handling Time = 19.222321ms
-Response Handling Time Standard Deviation = 1.180166ms
-test tests::stats_test_blockage_migration_45 ... ok
-
-test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured; 21 filtered out; finished in 11763.27s
-
-
-running 0 tests
-
-test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 2 filtered out; finished in 0.00s
-
-
-running 0 tests
-
-test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
-