build.py

#!/usr/bin/python2.7
import os, sys, platform, shutil
import re, threading, time, json
from os import path
from hashlib import sha1
from multiprocessing import cpu_count

config_file = "build.config.py"
cache_dir = ".buildcache"
object_dir = path.join(cache_dir, "obj")
cache_file = path.join(cache_dir, "cache.json")
max_workers = cpu_count()

config = {
    "compiler" : "gcc",
    "output"   : "a.out",
    "source"   : [ "src" ],
    "include"  : [],
    "cflags"   : [],
    "lflags"   : [],
    "run"      : "./{output}"
}

Hint, Warn, Error = range(3)

log_prefix = {
    Hint:  "\x1b[32mHint:\x1b[0m",
    Warn:  "\x1b[33mWarn:\x1b[0m",
    Error: "\x1b[31;1mError:\x1b[0m"
}

log_lock = threading.Lock()

def log(msg, mode=Hint):
    log_lock.acquire()
    print log_prefix[mode], msg
    log_lock.release()

def error(msg):
    log(msg, mode=Error)
    os._exit(1)

def load_config(filename):
    """ loads the given config file into the `config` global dict """
    if not path.exists(filename):
        error("config file does not exist: '%s'" % filename)
    d = {
        "opt": sys.argv,
        "platform": platform.system(),
        "error": error,
        "log": log,
        "Hint": Hint,
        "Warn": Warn,
        "Error": Error
    }
    execfile(filename, d)
    config.update(d)
    if len(config["source"]) == 0:
        error("no source directories specified in config")
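
# A minimal build.config.py might look like the sketch below (hypothetical values;
# the field names come from the `config` defaults above, and `opt`, `platform`,
# `log`, `error` and the log levels are injected by load_config). Optional "pre"
# and "post" callables, if defined, are run before and after the build:
#
#   compiler = "gcc"
#   output   = "bin/app"
#   source   = [ "src" ]
#   include  = [ "src", "lib" ]
#   cflags   = [ "-Wall", "-O2" ]
#   lflags   = [ "-lm" ]
#   if platform == "Windows":
#       output += ".exe"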

def load_cache(cache_file):
    if not path.exists(cache_file):
        return { "hashes": [], "cmd": "" }
    with open(cache_file) as fp:
        log("loaded cache")
        return json.load(fp)

def update_cache(cache_file, obj):
    with open(cache_file, "wb") as fp:
        json.dump(obj, fp, indent=2)
    log("updated cache")
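
# cache.json pairs each source file's deep hash with the compile command that
# produced the objects, e.g. (illustrative paths):
#   { "hashes": { "src/main.c": "<sha1>" }, "cmd": "gcc -c {infile} -o {outfile}" }
# The __main__ block below forces a clean build whenever the command changes.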

def resolve_file(filename, dir):
    """ finds the actual location of an included file """
    f = path.join(dir, filename)
    if path.exists(f):
        return short_name(f)
    for dir in config["include"]:
        f = path.join(dir, filename)
        if path.exists(f):
            return short_name(f)

file_info_cache = {}

def get_file_info(filename):
    """ returns a dict of file info for the given file """
    if filename in file_info_cache:
        return file_info_cache[filename]
    hash = sha1()
    includes = []
    with open(filename) as fp:
        for line in fp.readlines():
            # get includes
            if "#include" in line:
                match = re.match('^\s*#include\s+"(.*?)"', line)
                if match:
                    includes.append( match.group(1) )
            # update hash
            hash.update(line)
            hash.update("\n")
    res = { "hash": hash.hexdigest(), "includes": includes }
    file_info_cache[filename] = res
    return res

def short_name(filename):
    """ returns the filename relative to the current path """
    n = len(path.abspath("."))
    return path.abspath(filename)[n+1:]

def get_deep_hash(filename):
    """ creates a hash from the file and all its includes """
    h = sha1()
    processed = set()
    files = [ resolve_file(filename, ".") ]
    while len(files) > 0:
        f = files.pop()
        info = get_file_info(f)
        processed.add(f)
        # update hash
        h.update(info["hash"])
        # add includes
        for x in info["includes"]:
            resolved = resolve_file(x, path.dirname(f))
            if resolved:
                if resolved not in processed:
                    files.append(resolved)
            else:
                log("could not resolve file '%s'" % x, mode=Warn)
    return h.hexdigest()
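
# The deep hash is what drives incremental rebuilds: a .c file is recompiled only
# when its own contents, or any locally-included header reached transitively via
# resolve_file, differs from the hash recorded in the cache.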

def build_deep_hash_dict(cfiles):
    """ returns a dict mapping each cfile to its hash """
    res = {}
    for f in cfiles:
        res[f] = get_deep_hash(f)
    return res

def get_cfiles():
    """ returns all .h and .c files in source directories """
    res = []
    for dir in config["source"]:
        for root, dirs, files in os.walk(dir):
            for file in files:
                if file.endswith((".c", ".h")):
                    f = path.join(root, file)
                    res.append( short_name(f) )
    return res

def build_compile_cmd():
    """ creates the command used to compile files """
    lst = [
        config["compiler"],
        " ".join(map(lambda x: "-I" + x, config["include"])),
        " ".join(config["cflags"]),
        "-c", "{infile}", "-o", "{outfile}"
    ]
    return " ".join(lst)

def obj_name(filename):
    """ creates the object file name for a given filename """
    filename = re.sub("[^\w]+", "_", filename)
    return filename[:-2] + "_" + sha1(filename).hexdigest()[:8] + ".o"
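
# For illustration (path is hypothetical): obj_name("src/main.c") mangles the path
# to "src_main_c", drops the trailing "_c", and appends the first 8 hex chars of
# sha1("src_main_c"), giving "src_main_<sha1[:8]>.o"; the hash suffix keeps distinct
# paths that mangle to the same name from colliding in the flat object directory.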

def compile(cmd, filename):
    """ compiles the given file into an object file using the cmd """
    log("compiling '%s'" % filename)
    outfile = path.join(object_dir, obj_name(filename))
    res = os.system(cmd.format(infile=filename, outfile=outfile))
    if res != 0:
        error("failed to compile '%s'" % filename)

def link():
    """ links objects and outputs the final binary """
    log("linking")
    lst = [
        config["compiler"],
        "-o", config["output"],
        path.join(object_dir, "*"),
        " ".join(config["lflags"])
    ]
    cmd = " ".join(lst)
    res = os.system(cmd)
    if res != 0:
        error("failed to link")

def parallel(func, workers=4):
    """ runs func on multiple threads and waits for them all to finish """
    threads = []
    for i in range(workers):
        t = threading.Thread(target=func)
        threads.append(t)
        t.start()
    for t in threads:
        t.join()
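
# Note: the worker threads below share the `pending` list without a lock; this
# relies on CPython's GIL making list.pop() atomic, so each file is handed to at
# most one thread and an empty list simply raises the exception that ends the loop.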

if __name__ == "__main__":
    start_time = time.time()
    load_config(config_file)
    run_at_exit = False
    output_dir = path.join(".", path.dirname(config["output"]))
    cache = load_cache(cache_file)
    cmd = build_compile_cmd()

    if "run" in sys.argv:
        run_at_exit = True
    if cache["cmd"] != cmd:
        sys.argv.append("clean")
    if "clean" in sys.argv:
        log("performing clean build")
        shutil.rmtree(cache_dir, ignore_errors=True)
        cache = load_cache(cache_file)

    if not path.exists(object_dir):
        os.makedirs(object_dir)
    if not path.exists(output_dir):
        os.makedirs(output_dir)

    if "pre" in config:
        config["pre"]()

    cfiles = get_cfiles()
    hashes = build_deep_hash_dict(cfiles)

    # delete object files for cfiles that no longer exist
    obj_files = set(map(obj_name, cfiles))
    for f in os.listdir(object_dir):
        if f not in obj_files:
            os.remove(path.join(object_dir, f))

    # build list of all .c files that need compiling
    pending = []
    for f in cfiles:
        if f.endswith(".c"):
            if f not in cache["hashes"] or cache["hashes"][f] != hashes[f]:
                pending.append(f)

    # compile files until there are none left
    def worker():
        while True:
            try:
                f = pending.pop()
            except:
                break
            compile(cmd, f)
    parallel(worker, workers=max_workers)

    link()
    update_cache(cache_file, { "hashes": hashes, "cmd": cmd })

    if "post" in config:
        config["post"]()

    log("done [%.2fs]" % (time.time() - start_time))

    if run_at_exit:
        log("running")
        cmd = config["run"].format(output=config["output"])
        os.system(cmd)
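
# Example invocations (a sketch; "clean" and "run" are the only arguments the
# script looks for in sys.argv):
#
#   python2.7 build.py          # incremental build
#   python2.7 build.py clean    # wipe .buildcache and rebuild everything
#   python2.7 build.py run      # build, then run the configured "run" command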