#!/usr/bin/python

"""
Best-practices tracker for Tor source code.

Go through the various .c files and collect metrics about them. If the metrics
violate some of our best practices and they are not found in the optional
exceptions file, then log a problem about them.

We currently collect metrics about file size, function size and number of
#includes.

practracker.py should be run with an argument pointing to the Tor top-level
source directory, like this:

  $ python3 ./scripts/maint/practracker/practracker.py .

To regenerate the exceptions file so that it allows all current
problems in the Tor source, use the --regen flag:

  $ python3 ./scripts/maint/practracker/practracker.py --regen .
"""
from __future__ import print_function

import os, sys

import metrics
import util
import problem
# The filename of the exceptions file (it should be placed in the practracker
# directory).
EXCEPTIONS_FNAME = "./exceptions.txt"

# Recommended file size
MAX_FILE_SIZE = 3000 # lines
# Recommended function size
MAX_FUNCTION_SIZE = 100 # lines
# Recommended number of #includes
MAX_INCLUDE_COUNT = 50

# Map from problem type to functions that adjust for tolerance
TOLERANCE_FNS = {
    'include-count': lambda n: int(n*1.1),
    'function-size': lambda n: int(n*1.1),
    'file-size': lambda n: int(n*1.02)
}
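# For example, with the 10% function-size tolerance above, a function that
# exceeds its 100-line exception by up to int(100 * 1.1) == 110 lines should
# (per the "2.1" step in main() below) only produce a warning; anything larger
# is reported as an error.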
#######################################################

# ProblemVault singleton
ProblemVault = None

# The Tor source code topdir
TOR_TOPDIR = None

# ProblemFilter singleton.
FILTER = problem.ProblemFilter()
FILTER.addThreshold(problem.FileSizeItem("*", MAX_FILE_SIZE))
FILTER.addThreshold(problem.IncludeCountItem("*", MAX_INCLUDE_COUNT))
FILTER.addThreshold(problem.FunctionSizeItem("*", MAX_FUNCTION_SIZE))
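# (The "*" entries above presumably act as wildcard thresholds: judging from
# how FILTER.filter() is used in main(), items whose metric stays below the
# recommended maximum are dropped before they ever reach the ProblemVault.)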
#######################################################
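# Python 2's built-in open() has no "encoding" argument, so fall back to the
# platform default there; on Python 3 the sources are read as UTF-8.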
if sys.version_info[0] <= 2:
    def open_file(fname):
        return open(fname, 'r')
else:
    def open_file(fname):
        return open(fname, 'r', encoding='utf-8')

def consider_file_size(fname, f):
    """Consider the size of 'f' and yield a FileSizeItem for it.
    """
    file_size = metrics.get_file_len(f)
    yield problem.FileSizeItem(fname, file_size)

def consider_includes(fname, f):
    """Consider the #include count of 'f' and yield an IncludeCountItem
    for it.
    """
    include_count = metrics.get_include_count(f)
    yield problem.IncludeCountItem(fname, include_count)
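# Function-size items are named "<file>:<function>()", so a (hypothetical)
# oversized function foo() in src/core/or/channel.c would be reported as
# "src/core/or/channel.c:foo()".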
def consider_function_size(fname, f):
    """Yield a FunctionSizeItem for every function in 'f'.
    """
    for name, lines in metrics.get_function_lines(f):
        canonical_function_name = "%s:%s()" % (fname, name)
        yield problem.FunctionSizeItem(canonical_function_name, lines)
#######################################################

def consider_all_metrics(files_list):
    """Consider metrics for all files, and yield a sequence of problem.Item
       objects for those issues."""
    for fname in files_list:
        with open_file(fname) as f:
            for item in consider_metrics_for_file(fname, f):
                yield item

def consider_metrics_for_file(fname, f):
    """
       Yield a sequence of problem.Item objects for all of the metrics in
       'f'.
    """
    # Strip the topdir prefix from the path, so that problem locations are
    # relative to the source tree.
    if fname.startswith(TOR_TOPDIR):
        fname = fname[len(TOR_TOPDIR):]

    # Get the file length
    for item in consider_file_size(fname, f):
        yield item

    # Consider the number of #includes (rewind the file first; it has already
    # been read once)
    f.seek(0)
    for item in consider_includes(fname, f):
        yield item

    # Get the function lengths
    f.seek(0)
    for item in consider_function_size(fname, f):
        yield item
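# Header written at the top of a regenerated exceptions file.  The {MAX_...}
# placeholders are filled in from the module-level constants by the
# .format(**globals()) call below.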
HEADER="""\
# Welcome to the exceptions file for Tor's best-practices tracker!
#
# Each line of this file represents a single violation of Tor's best
# practices -- typically, a violation that we had before practracker.py
# first existed.
#
# There are three kinds of problems that we recognize right now:
#   function-size -- a function of more than {MAX_FUNCTION_SIZE} lines.
#   file-size -- a file of more than {MAX_FILE_SIZE} lines.
#   include-count -- a file with more than {MAX_INCLUDE_COUNT} #includes.
#
# Each line below represents a single exception that practracker should
# _ignore_. Each line has four parts:
#   1. The word "problem".
#   2. The kind of problem.
#   3. The location of the problem: either a filename, or a
#      filename:functionname pair.
#   4. The magnitude of the problem to ignore.
#
# So for example, consider this line:
#   problem file-size /src/core/or/connection_or.c 3200
#
# It tells practracker to allow the mentioned file to be up to 3200 lines
# long, even though ordinarily it would warn about any file with more than
# {MAX_FILE_SIZE} lines.
#
# You can either edit this file by hand, or regenerate it completely by
# running `make practracker-regen`.
#
# Remember: It is better to fix the problem than to add a new exception!
""".format(**globals())
def main(argv):
    import argparse

    progname = argv[0]
    parser = argparse.ArgumentParser(prog=progname)
    parser.add_argument("--regen", action="store_true",
                        help="Regenerate the exceptions file")
    parser.add_argument("--list-overstrict", action="store_true",
                        help="List over-strict exceptions")
    parser.add_argument("--exceptions",
                        help="Override the location for the exceptions file")
    parser.add_argument("--strict", action="store_true",
                        help="Make all warnings into errors")
    parser.add_argument("topdir", default=".", nargs="?",
                        help="Top-level directory for the tor source")
    args = parser.parse_args(argv[1:])

    global TOR_TOPDIR
    TOR_TOPDIR = args.topdir
    if args.exceptions:
        exceptions_file = args.exceptions
    else:
        exceptions_file = os.path.join(TOR_TOPDIR, "scripts/maint/practracker", EXCEPTIONS_FNAME)

    # 1) Get all the .c files we care about
    files_list = util.get_tor_c_files(TOR_TOPDIR)

    # 2) Initialize problem vault and load an optional exceptions file so that
    #    we don't warn about the past
    global ProblemVault

    if args.regen:
        tmpname = exceptions_file + ".tmp"
        tmpfile = open(tmpname, "w")
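        # Redirect stdout into the temporary file: the problems printed by the
        # step 3 loop below then become the entries of the regenerated
        # exceptions file.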
        sys.stdout = tmpfile
        sys.stdout.write(HEADER)
        ProblemVault = problem.ProblemVault()
    else:
        ProblemVault = problem.ProblemVault(exceptions_file)

    # 2.1) Adjust the exceptions so that we warn only about small problems,
    #      and produce errors on big ones.
    if not (args.regen or args.list_overstrict or args.strict):
        ProblemVault.set_tolerances(TOLERANCE_FNS)
    # 3) Go through all the files and report problems if they are not exceptions
    found_new_issues = 0
    for item in FILTER.filter(consider_all_metrics(files_list)):
        status = ProblemVault.register_problem(item)
        if status == problem.STATUS_ERR:
            print(item)
            found_new_issues += 1
        elif status == problem.STATUS_WARN:
            item.warn()

    if args.regen:
        tmpfile.close()
        os.rename(tmpname, exceptions_file)
        sys.exit(0)
    # If new issues were found, try to give the developer some advice on how
    # to resolve them.
    if found_new_issues and not args.regen:
        new_issues_str = """\
FAILURE: practracker found {} new problem(s) in the code: see warnings above.

Please fix the problems if you can, and update the exceptions file
({}) if you can't.

See doc/HACKING/HelpfulTools.md for more information on using practracker.

You can disable this message by setting the TOR_DISABLE_PRACTRACKER environment
variable.
""".format(found_new_issues, exceptions_file)
        print(new_issues_str)
    if args.list_overstrict:
        def k_fn(tup):
            return tup[0].key()
        for (ex, p) in sorted(ProblemVault.list_overstrict_exceptions(), key=k_fn):
            if p is None:
                print(ex, "->", 0)
            else:
                print(ex, "->", p.metric_value)
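    # The exit status is the number of new problems found, so any new issue
    # makes practracker exit nonzero (and thus fail when it is run as part of
    # an automated check).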
    sys.exit(found_new_issues)

if __name__ == '__main__':
    main(sys.argv)