practracker.py 9.9 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283
  1. #!/usr/bin/python
  2. """
  3. Best-practices tracker for Tor source code.
  4. Go through the various .c files and collect metrics about them. If the metrics
  5. violate some of our best practices and they are not found in the optional
  6. exceptions file, then log a problem about them.
  7. We currently do metrics about file size, function size and number of includes.
  8. practracker.py should be run with its second argument pointing to the Tor
  9. top-level source directory like this:
  10. $ python3 ./scripts/maint/practracker/practracker.py .
  11. To regenerate the exceptions file so that it allows all current
  12. problems in the Tor source, use the --regen flag:
$ python3 ./scripts/maint/practracker/practracker.py --regen .
  14. """
  15. from __future__ import print_function
  16. import os, sys
  17. import metrics
  18. import util
  19. import problem
  20. import includes
  21. # The filename of the exceptions file (it should be placed in the practracker directory)
  22. EXCEPTIONS_FNAME = "./exceptions.txt"
  23. # Recommended file size
  24. MAX_FILE_SIZE = 3000 # lines
  25. # Recommended function size
  26. MAX_FUNCTION_SIZE = 100 # lines
  27. # Recommended number of #includes
  28. MAX_INCLUDE_COUNT = 50
  29. # Recommended file size for headers
  30. MAX_H_FILE_SIZE = 500
  31. # Recommended include count for headers
  32. MAX_H_INCLUDE_COUNT = 15
  33. # Recommended number of dependency violations
  34. MAX_DEP_VIOLATIONS = 0
  35. # Map from problem type to functions that adjust for tolerance
  36. TOLERANCE_FNS = {
  37. 'include-count': lambda n: int(n*1.1),
  38. 'function-size': lambda n: int(n*1.1),
  39. 'file-size': lambda n: int(n*1.02),
  40. 'dependency-violation': lambda n: (n+2)
  41. }
  42. #######################################################
  43. # The Tor source code topdir
  44. TOR_TOPDIR = None
  45. #######################################################
  46. if sys.version_info[0] <= 2:
  47. def open_file(fname):
  48. return open(fname, 'r')
  49. else:
  50. def open_file(fname):
  51. return open(fname, 'r', encoding='utf-8')
  52. def consider_file_size(fname, f):
  53. """Consider the size of 'f' and yield an FileSizeItem for it.
  54. """
  55. file_size = metrics.get_file_len(f)
  56. yield problem.FileSizeItem(fname, file_size)
  57. def consider_includes(fname, f):
  58. """Consider the #include count in for 'f' and yield an IncludeCountItem
  59. for it.
  60. """
  61. include_count = metrics.get_include_count(f)
  62. yield problem.IncludeCountItem(fname, include_count)
  63. def consider_function_size(fname, f):
  64. """yield a FunctionSizeItem for every function in f.
  65. """
  66. for name, lines in metrics.get_function_lines(f):
  67. canonical_function_name = "%s:%s()" % (fname, name)
  68. yield problem.FunctionSizeItem(canonical_function_name, lines)
  69. def consider_include_violations(fname, real_fname, f):
  70. n = 0
  71. for item in includes.consider_include_rules(real_fname, f):
  72. n += 1
  73. if n:
  74. yield problem.DependencyViolationItem(fname, n)
  75. #######################################################
  76. def consider_all_metrics(files_list):
  77. """Consider metrics for all files, and yield a sequence of problem.Item
  78. object for those issues."""
  79. for fname in files_list:
  80. with open_file(fname) as f:
  81. for item in consider_metrics_for_file(fname, f):
  82. yield item
  83. def consider_metrics_for_file(fname, f):
  84. """
  85. Yield a sequence of problem.Item objects for all of the metrics in
  86. 'f'.
  87. """
  88. real_fname = fname
  89. # Strip the useless part of the path
  90. if fname.startswith(TOR_TOPDIR):
  91. fname = fname[len(TOR_TOPDIR):]
  92. # Get file length
  93. for item in consider_file_size(fname, f):
  94. yield item
  95. # Consider number of #includes
  96. f.seek(0)
  97. for item in consider_includes(fname, f):
  98. yield item
  99. # Get function length
  100. f.seek(0)
  101. for item in consider_function_size(fname, f):
  102. yield item
  103. # Check for "upward" includes
  104. f.seek(0)
  105. for item in consider_include_violations(fname, real_fname, f):
  106. yield item
  107. HEADER="""\
  108. # Welcome to the exceptions file for Tor's best-practices tracker!
  109. #
  110. # Each line of this file represents a single violation of Tor's best
  111. # practices -- typically, a violation that we had before practracker.py
  112. # first existed.
  113. #
  114. # There are three kinds of problems that we recognize right now:
  115. # function-size -- a function of more than {MAX_FUNCTION_SIZE} lines.
  116. # file-size -- a file of more than {MAX_FILE_SIZE} lines.
  117. # include-count -- a file with more than {MAX_INCLUDE_COUNT} #includes.
  118. #
  119. # Each line below represents a single exception that practracker should
  120. # _ignore_. Each line has four parts:
  121. # 1. The word "problem".
  122. # 2. The kind of problem.
  123. # 3. The location of the problem: either a filename, or a
  124. # filename:functionname pair.
  125. # 4. The magnitude of the problem to ignore.
  126. #
  127. # So for example, consider this line:
  128. # problem file-size /src/core/or/connection_or.c 3200
  129. #
  130. # It tells practracker to allow the mentioned file to be up to 3200 lines
  131. # long, even though ordinarily it would warn about any file with more than
  132. # {MAX_FILE_SIZE} lines.
  133. #
  134. # You can either edit this file by hand, or regenerate it completely by
  135. # running `make practracker-regen`.
  136. #
  137. # Remember: It is better to fix the problem than to add a new exception!
  138. """.format(**globals())
def main(argv):
    """Entry point: parse options, scan the Tor tree, and report (or
    regenerate exceptions for) any best-practices violations.

    Exits with the number of new issues found (0 on success).
    """
    import argparse

    progname = argv[0]
    parser = argparse.ArgumentParser(prog=progname)
    parser.add_argument("--regen", action="store_true",
                        help="Regenerate the exceptions file")
    parser.add_argument("--list-overbroad", action="store_true",
                        help="List over-strict exceptions")
    parser.add_argument("--exceptions",
                        help="Override the location for the exceptions file")
    parser.add_argument("--strict", action="store_true",
                        help="Make all warnings into errors")
    parser.add_argument("--terse", action="store_true",
                        help="Do not emit helpful instructions.")
    parser.add_argument("--max-h-file-size", default=MAX_H_FILE_SIZE,
                        help="Maximum lines per .H file")
    parser.add_argument("--max-h-include-count", default=MAX_H_INCLUDE_COUNT,
                        help="Maximum includes per .H file")
    parser.add_argument("--max-file-size", default=MAX_FILE_SIZE,
                        help="Maximum lines per C file")
    parser.add_argument("--max-include-count", default=MAX_INCLUDE_COUNT,
                        help="Maximum includes per C file")
    parser.add_argument("--max-function-size", default=MAX_FUNCTION_SIZE,
                        help="Maximum lines per function")
    parser.add_argument("--max-dependency-violations", default=MAX_DEP_VIOLATIONS,
                        help="Maximum number of dependency violations to allow")
    parser.add_argument("topdir", default=".", nargs="?",
                        help="Top-level directory for the tor source")
    args = parser.parse_args(argv[1:])

    # consider_metrics_for_file() reads this module-level global.
    global TOR_TOPDIR
    TOR_TOPDIR = args.topdir
    if args.exceptions:
        exceptions_file = args.exceptions
    else:
        exceptions_file = os.path.join(TOR_TOPDIR, "scripts/maint/practracker", EXCEPTIONS_FNAME)

    # 0) Configure our thresholds of "what is a problem actually"
    filt = problem.ProblemFilter()
    filt.addThreshold(problem.FileSizeItem("*.c", int(args.max_file_size)))
    filt.addThreshold(problem.IncludeCountItem("*.c", int(args.max_include_count)))
    filt.addThreshold(problem.FileSizeItem("*.h", int(args.max_h_file_size)))
    filt.addThreshold(problem.IncludeCountItem("*.h", int(args.max_h_include_count)))
    filt.addThreshold(problem.FunctionSizeItem("*.c", int(args.max_function_size)))
    filt.addThreshold(problem.DependencyViolationItem("*", int(args.max_dependency_violations)))

    # 1) Get all the .c files we care about
    files_list = util.get_tor_c_files(TOR_TOPDIR)

    # 2) Initialize problem vault and load an optional exceptions file so that
    #    we don't warn about the past.  In --regen mode we instead write a
    #    fresh exceptions file to a temporary path (renamed into place below).
    if args.regen:
        tmpname = exceptions_file + ".tmp"
        tmpfile = open(tmpname, "w")
        problem_file = tmpfile
        problem_file.write(HEADER)
        ProblemVault = problem.ProblemVault()
    else:
        ProblemVault = problem.ProblemVault(exceptions_file)
        problem_file = sys.stdout

    # 2.1) Adjust the exceptions so that we warn only about small problems,
    # and produce errors on big ones.
    if not (args.regen or args.list_overbroad or args.strict):
        ProblemVault.set_tolerances(TOLERANCE_FNS)

    # 3) Go through all the files and report problems if they are not exceptions
    found_new_issues = 0
    for item in filt.filter(consider_all_metrics(files_list)):
        status = ProblemVault.register_problem(item)
        if status == problem.STATUS_ERR:
            # New (or too-large) problem: goes to stdout normally, or into
            # the regenerated exceptions file in --regen mode.
            print(item, file=problem_file)
            found_new_issues += 1
        elif status == problem.STATUS_WARN:
            # warnings always go to stdout.
            print("(warning) {}".format(item))

    if args.regen:
        # Atomically replace the old exceptions file with the new one.
        tmpfile.close()
        os.rename(tmpname, exceptions_file)
        sys.exit(0)

    # If new issues were found, try to give out some advice to the developer on how to resolve it.
    if found_new_issues and not args.regen and not args.terse:
        new_issues_str = """\
FAILURE: practracker found {} new problem(s) in the code: see warnings above.
Please fix the problems if you can, and update the exceptions file
({}) if you can't.
See doc/HACKING/HelpfulTools.md for more information on using practracker.\
You can disable this message by setting the TOR_DISABLE_PRACTRACKER environment
variable.
""".format(found_new_issues, exceptions_file)
        print(new_issues_str)

    if args.list_overbroad:
        # Report stored exceptions that are larger than the worst problem
        # actually seen, sorted by exception key for stable output.
        def k_fn(tup):
            return tup[0].key()
        for (ex, p) in sorted(ProblemVault.list_overbroad_exceptions(), key=k_fn):
            if p is None:
                # No problem of this kind was seen at all.
                print(ex, "->", 0)
            else:
                print(ex, "->", p.metric_value)

    # Exit status is the count of new issues, so CI fails when any appear.
    sys.exit(found_new_issues)
if __name__ == '__main__':
    # Allow users/CI to skip practracker entirely by setting this variable
    # to any non-empty value.  (An empty value does NOT disable it.)
    if os.environ.get("TOR_DISABLE_PRACTRACKER"):
        sys.exit(0)
    main(sys.argv)