#!/usr/bin/python3
"""
Best-practices tracker for Tor source code.

Go through the various .c files and collect metrics about them. If the
metrics violate some of our best practices and they are not found in the
optional exceptions file, then log a problem about them.

We currently collect metrics on file size, function size and number of
#includes.

practracker.py should be run with the Tor top-level source directory as its
only argument, like this:
    $ python3 ./scripts/maint/practracker/practracker.py .

The exceptions file is meant to be initialized once with the current state of
the source code and then saved in the repository forever after:
    $ python3 ./scripts/maint/practracker/practracker.py . > ./scripts/maint/practracker/exceptions.txt
"""
from __future__ import print_function

import os, sys

import metrics
import util
import problem

# The filename of the exceptions file (it should be placed in the practracker directory)
EXCEPTIONS_FNAME = "./exceptions.txt"

# Recommended file size
MAX_FILE_SIZE = 3000 # lines
# Recommended function size
MAX_FUNCTION_SIZE = 100 # lines
# Recommended number of #includes
MAX_INCLUDE_COUNT = 50
#######################################################

# ProblemVault singleton
ProblemVault = None

# The Tor source code topdir
TOR_TOPDIR = None

#######################################################
def consider_file_size(fname, f):
    """Consider file size issues for 'f' and return True if a new issue was found"""
    file_size = metrics.get_file_len(f)

    if file_size > MAX_FILE_SIZE:
        p = problem.FileSizeProblem(fname, file_size)
        return ProblemVault.register_problem(p)
    return False
def consider_includes(fname, f):
    """Consider #include issues for 'f' and return True if a new issue was found"""
    include_count = metrics.get_include_count(f)

    if include_count > MAX_INCLUDE_COUNT:
        p = problem.IncludeCountProblem(fname, include_count)
        return ProblemVault.register_problem(p)
    return False
def consider_function_size(fname, f):
    """Consider the function sizes for 'f' and return True if a new issue was found"""
    found_new_issues = False

    for name, lines in metrics.get_function_lines(f):
        # Don't worry about functions within our limits
        if lines <= MAX_FUNCTION_SIZE:
            continue

        # That's a big function! Issue a problem!
        canonical_function_name = "%s:%s()" % (fname, name)
        p = problem.FunctionSizeProblem(canonical_function_name, lines)
        found_new_issues |= ProblemVault.register_problem(p)

    return found_new_issues
#######################################################

def consider_all_metrics(files_list):
    """Consider metrics for all files, and return True if new issues were found"""
    found_new_issues = False
    for fname in files_list:
        with open(fname, 'r') as f:
            found_new_issues |= consider_metrics_for_file(fname, f)
    return found_new_issues
def consider_metrics_for_file(fname, f):
    """
    Consider the various metrics for the file with filename 'fname' and open
    file object 'f'. Return True if we found new issues.
    """
    # Strip the useless part of the path
    if fname.startswith(TOR_TOPDIR):
        fname = fname[len(TOR_TOPDIR):]

    found_new_issues = False

    # Get file length
    found_new_issues |= consider_file_size(fname, f)

    # Consider number of #includes
    f.seek(0)
    found_new_issues |= consider_includes(fname, f)

    # Get function length
    f.seek(0)
    found_new_issues |= consider_function_size(fname, f)

    return found_new_issues
def main():
    if len(sys.argv) != 2:
        print("Usage:\n\t$ practracker.py <tor topdir>\n\t(e.g. $ practracker.py ~/tor/)")
        return

    global TOR_TOPDIR
    TOR_TOPDIR = sys.argv[1]
    exceptions_file = os.path.join(TOR_TOPDIR, "scripts/maint/practracker", EXCEPTIONS_FNAME)

    # 1) Get all the .c files we care about
    files_list = util.get_tor_c_files(TOR_TOPDIR)

    # 2) Initialize the problem vault and load an optional exceptions file so
    #    that we don't warn about the past
    global ProblemVault
    ProblemVault = problem.ProblemVault(exceptions_file)

    # 3) Go through all the files and report problems if they are not exceptions
    found_new_issues = consider_all_metrics(files_list)

    # If new issues were found, give the developer some advice on how to
    # resolve them.
    if found_new_issues:
        new_issues_str = """\
FAILURE: practracker found new problems in the code: see warnings above.

Please fix the problems if you can, and update the exceptions file
({}) if you can't.

See doc/HACKING/HelpfulTools.md for more information on using practracker.\
""".format(exceptions_file)
        print(new_issues_str)

    # Exit with status 1 if new issues were found, and 0 otherwise.
    sys.exit(found_new_issues)

if __name__ == '__main__':
    main()