# fabricate.py
# (Documentation-listing page header removed; see the module docstring below.)
1 #!/usr/bin/env python
2 
3 """Build tool that finds dependencies automatically for any language.
4 
5 fabricate is a build tool that finds dependencies automatically for any
6 language. It's small and just works. No hidden stuff behind your back. It was
7 inspired by Bill McCloskey's make replacement, memoize, but fabricate works on
8 Windows as well as Linux.
9 
10 Read more about how to use it and how it works on the project page:
11  https://github.com/SimonAlfie/fabricate/
12 
13 Like memoize, fabricate is released under a "New BSD license". fabricate is
14 copyright (c) 2009 Brush Technology. Full text of the license is here:
15  https://github.com/SimonAlfie/fabricate/wiki/License
16 
17 To get help on fabricate functions:
18  from fabricate import *
19  help(function)
20 
21 """
22 
23 from __future__ import with_statement, print_function, unicode_literals
24 
25 # fabricate version number
26 __version__ = '1.29.3'
27 
28 # if version of .deps file has changed, we know to not use it
29 deps_version = 2
30 
31 import atexit
32 import optparse
33 import os
34 import platform
35 import re
36 import shlex
37 import stat
38 import subprocess
39 import sys
40 import tempfile
41 import time
42 import threading # NB uses old camelCase names for backward compatibility
43 import traceback
# multiprocessing module only exists on Python >= 2.6
try:
    import multiprocessing
except ImportError:
    class MultiprocessingModule(object):
        """ Stand-in for the multiprocessing module on old Pythons: any
            attribute access fails loudly, so serial builds still work and
            only parallel builds raise. """
        def __getattr__(self, name):
            raise NotImplementedError("multiprocessing module not available, can't do parallel builds")
    multiprocessing = MultiprocessingModule()
52 
# compatibility
PY3 = sys.version_info[0] == 3
if PY3:
    string_types = str
    threading_condition = threading.Condition
else:
    string_types = basestring

try:
    # The private class was threading._Condition on older interpreters.
    # BUG FIX: a missing attribute raises AttributeError, not ImportError,
    # so the previous "except ImportError" could never trigger the fallback.
    threading_condition = threading._Condition
except AttributeError:
    threading_condition = threading.Condition
65 
# so you can do "from fabricate import *" to simplify your build script
__all__ = ['setup', 'run', 'autoclean', 'main', 'shell', 'fabricate_version',
           'memoize', 'outofdate', 'parse_options', 'after',
           'ExecutionError', 'md5_hasher', 'mtime_hasher',
           'Runner', 'AtimesRunner', 'StraceRunner', 'AlwaysRunner',
           'SmartRunner', 'Builder']

import textwrap

# Append the public API to the module docstring at import time so that
# help(fabricate) lists every exported name.
__doc__ += "Exported functions are:\n" + ' ' + '\n '.join(textwrap.wrap(', '.join(__all__), 80))
76 
77 
78 
# Worst-case timestamp resolutions per filesystem; AtimesRunner compares
# before/after times against these to decide whether a change is real.
FAT_atime_resolution = 24*60*60  # resolution on FAT filesystems (seconds)
FAT_mtime_resolution = 2

# NTFS resolution is < 1 ms
# We assume this is considerably more than time to run a new process

NTFS_atime_resolution = 0.0002048  # resolution on NTFS filesystems (seconds)
NTFS_mtime_resolution = 0.0002048  # is actually 0.1us but python's can be
                                   # as low as 204.8us due to poor
                                   # float precision when storing numbers
                                   # as big as NTFS file times can be
                                   # (float has 52-bit precision and NTFS
                                   # FILETIME has 63-bit precision, so
                                   # we've lost 11 bits = 2048)

# So we can use md5func in old and new versions of Python without warnings
try:
    import hashlib
    md5func = hashlib.md5
except ImportError:
    # Python < 2.5 fallback: the deprecated md5 module
    import md5
    md5func = md5.new
101 
# Use json, or pickle on older Python versions if simplejson not installed
try:
    import json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        # last resort: cPickle, wrapped so it looks like the json module
        import cPickle
        # needed to ignore the indent= argument for pickle's dump()
        class PickleJson:
            def load(self, f):
                return cPickle.load(f)
            def dump(self, obj, f, indent=None, sort_keys=None):
                return cPickle.dump(obj, f)
        json = PickleJson()
117 
def printerr(message):
    """ Print given message to stderr with a line feed. """
    sys.stderr.write('%s\n' % message)
121 
class PathError(Exception):
    """ Raised when a build directory or path given to setup() does not
        exist on disk (restores the class header lost in extraction;
        raised by AtimesRunner.exists). """
    pass
class ExecutionError(Exception):
    """ Raised by shell() and run() if command returns non-zero exit code. """
    # args are (message, output, status) -- see _shell() and
    # StraceRunner.__call__ which construct it that way
    pass
128 
def args_to_list(args):
    """ Return a flat list of the given arguments for shell().
        Nested lists/tuples are flattened recursively, None entries are
        dropped, and non-string values are converted with str(). """
    flat = []
    for item in args:
        if item is None:
            continue
        if isinstance(item, (list, tuple)):
            flat.extend(args_to_list(item))
            continue
        if not isinstance(item, string_types):
            item = str(item)
        flat.append(item)
    return flat
142 
143 def shell(*args, **kwargs):
144  r""" Run a command: program name is given in first arg and command line
145  arguments in the rest of the args. Iterables (lists and tuples) in args
146  are recursively converted to separate arguments, non-string types are
147  converted with str(arg), and None is ignored. For example:
148 
149  >>> def tail(input, n=3, flags=None):
150  >>> args = ['-n', n]
151  >>> return shell('tail', args, flags, input=input)
152  >>> tail('a\nb\nc\nd\ne\n')
153  'c\nd\ne\n'
154  >>> tail('a\nb\nc\nd\ne\n', 2, ['-v'])
155  '==> standard input <==\nd\ne\n'
156 
157  Keyword arguments kwargs are interpreted as follows:
158 
159  "input" is a string to pass standard input into the process (or the
160  default of None to use parent's stdin, eg: the keyboard)
161  "silent" is True (default) to return process's standard output as a
162  string, or False to print it as it comes out
163  "shell" set to True will run the command via the shell (/bin/sh or
164  COMSPEC) instead of running the command directly (the default)
165  "ignore_status" set to True means ignore command status code -- i.e.,
166  don't raise an ExecutionError on nonzero status code
167  Any other kwargs are passed directly to subprocess.Popen
168  Raises ExecutionError(message, output, status) if the command returns
169  a non-zero status code. """
170  try:
171  return _shell(args, **kwargs)
172  finally:
173  sys.stderr.flush()
174  sys.stdout.flush()
175 
def _shell(args, input=None, silent=True, shell=False, ignore_status=False, **kwargs):
    """ Implementation behind shell(): build the argument list, spawn the
        process, and raise ExecutionError on a non-zero exit unless
        ignore_status is set.  Returns captured stdout when silent. """
    stdin = subprocess.PIPE if input else None
    stdout = subprocess.PIPE if silent else None
    arglist = args_to_list(args)
    if not arglist:
        raise TypeError('shell() takes at least 1 argument (0 given)')
    if shell:
        # handle subprocess.Popen quirk where subsequent args are passed
        # to bash instead of to our command
        command = subprocess.list2cmdline(arglist)
    else:
        command = arglist
    try:
        proc = subprocess.Popen(command, stdin=stdin, stdout=stdout,
                                stderr=subprocess.STDOUT, shell=shell, **kwargs)
    except OSError as e:
        # Work around the problem that Windows Popen doesn't say what file it couldn't find
        if platform.system() == 'Windows' and e.errno == 2 and e.filename is None:
            e.filename = arglist[0]
        raise e
    output, stderr = proc.communicate(input)
    status = proc.wait()
    if status and not ignore_status:
        raise ExecutionError('%r exited with status %d'
                             % (os.path.basename(arglist[0]), status),
                             output, status)
    if silent:
        return output
210 
def md5_hasher(filename):
    """ Return MD5 hash of given filename if it is a regular file or
        a symlink with a hashable target, or the MD5 hash of the
        target_filename if it is a symlink without a hashable target,
        or the MD5 hash of the filename if it is a directory, or None
        if file doesn't exist.

        Note: Python versions before 3.2 do not support os.readlink on
        Windows so symlinks without a hashable target fall back to
        a hash of the filename if the symlink target is a directory,
        or None if the symlink is broken"""
    if not isinstance(filename, bytes):
        # hash raw bytes so results are stable across str/bytes callers
        filename = filename.encode('utf-8')
    try:
        # 'with' guarantees the handle is closed even if read() fails
        with open(filename, 'rb') as f:
            return md5func(f.read()).hexdigest()
    except IOError:
        if hasattr(os, 'readlink') and os.path.islink(filename):
            # unreadable symlink: hash the link's target path instead
            return md5func(os.readlink(filename)).hexdigest()
        elif os.path.isdir(filename):
            # directories can't be read: hash the name itself
            return md5func(filename).hexdigest()
        return None
236 
def mtime_hasher(filename):
    """ Return modification time of file, or None if file doesn't exist. """
    try:
        return repr(os.stat(filename).st_mtime)
    except (IOError, OSError):
        return None
244 
246  """ Exception raise by Runner constructor if it is not supported
247  on the current platform."""
248  pass
249 
class Runner(object):
    """ Base interface for command runners: callables that execute a build
        command and report the files it read and wrote. """

    def __call__(self, *args, **kwargs):
        """ Run command and return (dependencies, outputs), where
            dependencies is a list of the filenames of files that the
            command depended on, and output is a list of the filenames
            of files that the command modified. The input is passed
            to shell()"""
        raise NotImplementedError("Runner subclass called but subclass didn't define __call__")

    def actual_runner(self):
        """ Return the actual runner object (overriden in SmartRunner). """
        return self

    def ignore(self, name):
        """ Return a match object if 'name' matches the builder's ignore
            regex, else None. """
        return self._builder.ignore.search(name)
265 
267  def __init__(self, builder):
268  self._builder = builder
269  self.atimes = AtimesRunner.has_atimes(self._builder.dirs)
270  if self.atimes == 0:
272  'atimes are not supported on this platform')
273 
    @staticmethod
    def file_has_atimes(filename):
        """ Return whether the given filesystem supports access time updates for
            this file. Return:
              - 0 if no a/mtimes not updated
              - 1 if the atime resolution is at least one day and
                the mtime resolution at least 2 seconds (as on FAT filesystems)
              - 2 if the atime and mtime resolutions are both < ms
                (NTFS filesystem has 100 ns resolution). """

        def access_file(filename):
            """ Access (read a byte from) file to try to update its access time. """
            f = open(filename)
            f.read(1)
            f.close()

        # Push the file's a/mtimes back by a FAT-sized step, then access it.
        initial = os.stat(filename)
        os.utime(filename, (
            initial.st_atime-FAT_atime_resolution,
            initial.st_mtime-FAT_mtime_resolution))

        adjusted = os.stat(filename)
        access_file(filename)
        after = os.stat(filename)

        # Check that a/mtimes actually moved back by at least resolution and
        # updated by a file access.
        # add NTFS_atime_resolution to account for float resolution factors
        # Comment on resolution/2 in atimes_runner()
        if initial.st_atime-adjusted.st_atime > FAT_atime_resolution+NTFS_atime_resolution or \
           initial.st_mtime-adjusted.st_mtime > FAT_mtime_resolution+NTFS_atime_resolution or \
           initial.st_atime==adjusted.st_atime or \
           initial.st_mtime==adjusted.st_mtime or \
           not after.st_atime-FAT_atime_resolution/2 > adjusted.st_atime:
            return 0

        # FAT-level support confirmed; now test at NTFS granularity.
        os.utime(filename, (
            initial.st_atime-NTFS_atime_resolution,
            initial.st_mtime-NTFS_mtime_resolution))
        adjusted = os.stat(filename)

        # Check that a/mtimes actually moved back by at least resolution
        # Note: != comparison here fails due to float rounding error
        # double NTFS_atime_resolution to account for float resolution factors
        if initial.st_atime-adjusted.st_atime > NTFS_atime_resolution*2 or \
           initial.st_mtime-adjusted.st_mtime > NTFS_mtime_resolution*2 or \
           initial.st_atime==adjusted.st_atime or \
           initial.st_mtime==adjusted.st_mtime:
            return 1

        return 2
325 
326  @staticmethod
327  def exists(path):
328  if not os.path.exists(path):
329  # Note: in linux, error may not occur: strace runner doesn't check
330  raise PathError("build dirs specified a non-existant path '%s'" % path)
331 
    @staticmethod
    def has_atimes(paths):
        """ Return whether a file created in each path supports atimes and mtimes.
            Return value is the same as used by file_has_atimes
            Note: for speed, this only tests files created at the top directory
            of each path. A safe assumption in most build environments.
            In the unusual case that any sub-directories are mounted
            on alternate file systems that don't support atimes, the build may
            fail to identify a dependency """

        atimes = 2  # start by assuming we have best atimes
        for path in paths:
            AtimesRunner.exists(path)
            # probe with a throwaway temp file created in the directory itself
            handle, filename = tempfile.mkstemp(dir=path)
            try:
                try:
                    f = os.fdopen(handle, 'wb')
                except:
                    os.close(handle)
                    raise
                try:
                    f.write(b'x')  # need a byte in the file for access test
                finally:
                    f.close()
                # keep the weakest support level seen across all dirs
                atimes = min(atimes, AtimesRunner.file_has_atimes(filename))
            finally:
                os.remove(filename)
        return atimes
360 
    def _file_times(self, path, depth):
        """ Helper function for file_times().
            Return a dict of file times, recursing directories that don't
            start with self._builder.ignoreprefix """

        AtimesRunner.exists(path)
        names = os.listdir(path)
        times = {}
        ignoreprefix = self._builder.ignoreprefix
        for name in names:
            if ignoreprefix and name.startswith(ignoreprefix):
                continue
            # keep names relative when scanning the current directory
            if path == '.':
                fullname = name
            else:
                fullname = os.path.join(path, name)
            st = os.stat(fullname)
            if stat.S_ISDIR(st.st_mode):
                if depth > 1:
                    times.update(self._file_times(fullname, depth-1))
            elif stat.S_ISREG(st.st_mode):
                # only regular files are tracked (not sockets/devices/links)
                times[fullname] = st.st_atime, st.st_mtime
        return times
384 
385  def file_times(self):
386  """ Return a dict of "filepath: (atime, mtime)" entries for each file
387  in self._builder.dirs. "filepath" is the absolute path, "atime" is
388  the access time, "mtime" the modification time.
389  Recurse directories that don't start with
390  self._builder.ignoreprefix and have depth less than
391  self._builder.dirdepth. """
392 
393  times = {}
394  for path in self._builder.dirs:
395  times.update(self._file_times(path, self._builder.dirdepth))
396  return times
397 
398  def _utime(self, filename, atime, mtime):
399  """ Call os.utime but ignore permission errors """
400  try:
401  os.utime(filename, (atime, mtime))
402  except OSError as e:
403  # ignore permission errors -- we can't build with files
404  # that we can't access anyway
405  if e.errno != 1:
406  raise
407 
408  def _age_atimes(self, filetimes):
409  """ Age files' atimes and mtimes to be at least FAT_xx_resolution old.
410  Only adjust if the given filetimes dict says it isn't that old,
411  and return a new dict of filetimes with the ages adjusted. """
412  adjusted = {}
413  now = time.time()
414  for filename, entry in filetimes.items():
415  if now-entry[0] < FAT_atime_resolution or now-entry[1] < FAT_mtime_resolution:
416  entry = entry[0] - FAT_atime_resolution, entry[1] - FAT_mtime_resolution
417  self._utime(filename, entry[0], entry[1])
418  adjusted[filename] = entry
419  return adjusted
420 
    def __call__(self, *args, **kwargs):
        """ Run command and return its dependencies and outputs, using before
            and after access times to determine dependencies. """

        # For Python pre-2.5, ensure os.stat() returns float atimes
        # NOTE(review): os.stat_float_times was removed in Python 3.12 --
        # this runner targets older interpreters; confirm before using there.
        old_stat_float = os.stat_float_times()
        os.stat_float_times(True)

        originals = self.file_times()
        if self.atimes == 2:
            # sub-millisecond timestamps: no aging needed
            befores = originals
            atime_resolution = 0
            mtime_resolution = 0
        else:
            # FAT-level timestamps: push times back so a change is detectable
            befores = self._age_atimes(originals)
            atime_resolution = FAT_atime_resolution
            mtime_resolution = FAT_mtime_resolution
        shell_keywords = dict(silent=False)
        shell_keywords.update(kwargs)
        shell(*args, **shell_keywords)
        afters = self.file_times()
        deps = []
        outputs = []
        for name in afters:
            if name in befores:
                # if file exists before+after && mtime changed, add to outputs
                # Note: Can't just check that atimes > than we think they were
                #       before because os might have rounded them to a later
                #       date than what we think we set them to in befores.
                #       So we make sure they're > by at least 1/2 the
                #       resolution. This will work for anything with a
                #       resolution better than FAT.
                if afters[name][1]-mtime_resolution/2 > befores[name][1]:
                    if not self.ignore(name):
                        outputs.append(name)
                elif afters[name][0]-atime_resolution/2 > befores[name][0]:
                    # otherwise add to deps if atime changed
                    if not self.ignore(name):
                        deps.append(name)
            else:
                # file created (in afters but not befores), add as output
                if not self.ignore(name):
                    outputs.append(name)

        if self.atimes < 2:
            # Restore atimes of files we didn't access: not for any functional
            # reason -- it's just to preserve the access time for the user's info
            for name in deps:
                originals.pop(name)
            for name in originals:
                original = originals[name]
                if original != afters.get(name, None):
                    self._utime(name, original[0], original[1])

        os.stat_float_times(old_stat_float)  # restore stat_float_times value
        return deps, outputs
477 
479  def __init__(self, cwd='.', delayed=False):
480  self.cwd = cwd
481  self.deps = set()
482  self.outputs = set()
483  self.delayed = delayed
484  self.delayed_lines = []
485 
486  def add_dep(self, dep):
487  self.deps.add(dep)
488 
489  def add_output(self, output):
490  self.outputs.add(output)
491 
492  def add_delayed_line(self, line):
493  self.delayed_lines.append(line)
494 
495  def __str__(self):
496  return '<StraceProcess cwd=%s deps=%s outputs=%s>' % \
497  (self.cwd, self.deps, self.outputs)
498 
def _call_strace(self, *args, **kwargs):
    """ Top level function call for Strace that can be run in parallel """
    # Module-level trampoline: multiprocessing pools can pickle a plain
    # function reference but not a bound method, so the pool invokes this
    # with the runner instance passed explicitly as the first argument.
    return self(*args, **kwargs)
502 
    # if True, strace log files are kept (strace000.txt, ...) instead of deleted
    keep_temps = False

    def __init__(self, builder, build_dir=None):
        # probe once for the set of syscalls this strace build understands
        self.strace_system_calls = StraceRunner.get_strace_system_calls()
        if self.strace_system_calls is None:
            raise RunnerUnsupportedException('strace is not available')
        self._builder = builder
        self.temp_count = 0  # counter used to name kept strace logs
        self.build_dir = os.path.abspath(build_dir or os.getcwd())
513 
514  @staticmethod
516  """ Return None if this system doesn't have strace, otherwise
517  return a comma seperated list of system calls supported by strace. """
518  if platform.system() == 'Windows':
519  # even if windows has strace, it's probably a dodgy cygwin one
520  return None
521  possible_system_calls = ['open','stat', 'stat64', 'lstat', 'lstat64',
522  'execve','exit_group','chdir','mkdir','rename','clone','vfork',
523  'fork','symlink','creat']
524  valid_system_calls = []
525  try:
526  # check strace is installed and if it supports each type of call
527  for system_call in possible_system_calls:
528  proc = subprocess.Popen(['strace', '-e', 'trace=' + system_call], stderr=subprocess.PIPE)
529  stdout, stderr = proc.communicate()
530  proc.wait()
531  if b'invalid system call' not in stderr:
532  valid_system_calls.append(system_call)
533  except OSError:
534  return None
535  return ','.join(valid_system_calls)
536 
    # Regular expressions for parsing of strace log.
    # Each pattern captures the pid prefix that strace -f adds, plus the
    # filename ('name') or other field of interest.
    _open_re = re.compile(r'(?P<pid>\d+)\s+open\("(?P<name>[^"]*)", (?P<mode>[^,)]*)')
    _stat_re = re.compile(r'(?P<pid>\d+)\s+l?stat(?:64)?\("(?P<name>[^"]*)", .*')  # stat,lstat,stat64,lstat64
    _execve_re = re.compile(r'(?P<pid>\d+)\s+execve\("(?P<name>[^"]*)", .*')
    _creat_re = re.compile(r'(?P<pid>\d+)\s+creat\("(?P<name>[^"]*)", .*')
    _mkdir_re = re.compile(r'(?P<pid>\d+)\s+mkdir\("(?P<name>[^"]*)", .*\)\s*=\s(?P<result>-?[0-9]*).*')
    _rename_re = re.compile(r'(?P<pid>\d+)\s+rename\("[^"]*", "(?P<name>[^"]*)"\)')
    _symlink_re = re.compile(r'(?P<pid>\d+)\s+symlink\("[^"]*", "(?P<name>[^"]*)"\)')
    _kill_re = re.compile(r'(?P<pid>\d+)\s+killed by.*')
    _chdir_re = re.compile(r'(?P<pid>\d+)\s+chdir\("(?P<cwd>[^"]*)"\)')
    _exit_group_re = re.compile(r'(?P<pid>\d+)\s+exit_group\((?P<status>.*)\).*')
    _clone_re = re.compile(r'(?P<pid_clone>\d+)\s+(clone|fork|vfork)\(.*\)\s*=\s*(?P<pid>\d*)')

    # Regular expressions for detecting interrupted lines in strace log
    # 3618 clone( <unfinished ...>
    # 3618 <... clone resumed> child_stack=0, flags=CLONE, child_tidptr=0x7f83deffa780) = 3622
    _unfinished_start_re = re.compile(r'(?P<pid>\d+)(?P<body>.*)<unfinished ...>$')
    _unfinished_end_re = re.compile(r'(?P<pid>\d+)\s+<\.\.\..*>(?P<body>.*)')
555 
556  def _do_strace(self, args, kwargs, outfile, outname):
557  """ Run strace on given command args/kwargs, sending output to file.
558  Return (status code, list of dependencies, list of outputs). """
559  shell_keywords = dict(silent=False)
560  shell_keywords.update(kwargs)
561  try:
562  shell('strace', '-fo', outname, '-e',
563  'trace=' + self.strace_system_calls,
564  args, **shell_keywords)
565  except ExecutionError as e:
566  # if strace failed to run, re-throw the exception
567  # we can tell this happend if the file is empty
568  outfile.seek(0, os.SEEK_END)
569  if outfile.tell() is 0:
570  raise e
571  else:
572  # reset the file postion for reading
573  outfile.seek(0)
574 
575  self.status = 0
576  processes = {} # dictionary of processes (key = pid)
577  unfinished = {} # list of interrupted entries in strace log
578  for line in outfile:
579  self._match_line(line, processes, unfinished)
580 
581  # collect outputs and dependencies from all processes
582  deps = set()
583  outputs = set()
584  for pid, process in processes.items():
585  deps = deps.union(process.deps)
586  outputs = outputs.union(process.outputs)
587 
588  return self.status, list(deps), list(outputs)
589 
    def _match_line(self, line, processes, unfinished):
        """ Parse one line of strace output, updating 'processes'
            (pid -> StraceProcess) and 'unfinished' (pid -> partial line
            buffer for lines strace split with '<unfinished ...>'). """
        # look for split lines
        unfinished_start_match = self._unfinished_start_re.match(line)
        unfinished_end_match = self._unfinished_end_re.match(line)
        if unfinished_start_match:
            pid = unfinished_start_match.group('pid')
            body = unfinished_start_match.group('body')
            unfinished[pid] = pid + ' ' + body
            return
        elif unfinished_end_match:
            pid = unfinished_end_match.group('pid')
            body = unfinished_end_match.group('body')
            if pid not in unfinished:
                # Looks like we need to handle an strace bug here
                # I think it is safe to ignore as I have only seen futex calls which strace should not output
                printerr('fabricate: Warning: resume without unfinished in strace output (strace bug?), \'%s\'' % line.strip())
                return
            # stitch the two halves back into one logical line
            line = unfinished[pid] + body
            del unfinished[pid]

        is_output = False
        open_match = self._open_re.match(line)
        stat_match = self._stat_re.match(line)
        execve_match = self._execve_re.match(line)
        creat_match = self._creat_re.match(line)
        mkdir_match = self._mkdir_re.match(line)
        symlink_match = self._symlink_re.match(line)
        rename_match = self._rename_re.match(line)
        clone_match = self._clone_re.match(line)

        kill_match = self._kill_re.match(line)
        if kill_match:
            return None, None, None

        match = None
        if execve_match:
            pid = execve_match.group('pid')
            match = execve_match  # Executables can be dependencies
            if pid not in processes and len(processes) == 0:
                # This is the first process so create dict entry
                processes[pid] = StraceProcess()
        elif clone_match:
            pid = clone_match.group('pid')
            pid_clone = clone_match.group('pid_clone')
            if pid not in processes:
                # Simple case where there are no delayed lines
                processes[pid] = StraceProcess(processes[pid_clone].cwd)
            else:
                # Some line processing was delayed due to an interrupted clone_match
                processes[pid].cwd = processes[pid_clone].cwd  # Set the correct cwd
                processes[pid].delayed = False  # Set that matching is no longer delayed
                for delayed_line in processes[pid].delayed_lines:
                    # Process all the delayed lines
                    self._match_line(delayed_line, processes, unfinished)
                processes[pid].delayed_lines = []  # Clear the lines
        elif open_match:
            match = open_match
            mode = match.group('mode')
            if 'O_WRONLY' in mode or 'O_RDWR' in mode:
                # it's an output file if opened for writing
                is_output = True
        elif stat_match:
            match = stat_match
        elif creat_match:
            match = creat_match
            # a created file is an output file
            is_output = True
        elif mkdir_match:
            match = mkdir_match
            if match.group('result') == '0':
                # a created directory is an output file
                is_output = True
        elif symlink_match:
            match = symlink_match
            # the created symlink is an output file
            is_output = True
        elif rename_match:
            match = rename_match
            # the destination of a rename is an output file
            is_output = True

        if match:
            name = match.group('name')
            pid = match.group('pid')
            if not self._matching_is_delayed(processes, pid, line):
                cwd = processes[pid].cwd
                if cwd != '.':
                    name = os.path.join(cwd, name)

                # normalise path name to ensure files are only listed once
                name = os.path.normpath(name)

                # if it's an absolute path name under the build directory,
                # make it relative to build_dir before saving to .deps file
                if os.path.isabs(name) and name.startswith(self.build_dir):
                    name = name[len(self.build_dir):]
                    name = name.lstrip(os.path.sep)

                if (self._builder._is_relevant(name)
                    and not self.ignore(name)
                    and os.path.lexists(name)):
                    if is_output:
                        processes[pid].add_output(name)
                    else:
                        processes[pid].add_dep(name)

        # track working-directory changes so later relative names resolve
        match = self._chdir_re.match(line)
        if match:
            pid = match.group('pid')
            if not self._matching_is_delayed(processes, pid, line):
                processes[pid].cwd = os.path.join(processes[pid].cwd, match.group('cwd'))

        # remember the last exit status seen; reported by _do_strace
        match = self._exit_group_re.match(line)
        if match:
            self.status = int(match.group('status'))
705 
706  def _matching_is_delayed(self, processes, pid, line):
707  # Check if matching is delayed and cache a delayed line
708  if pid not in processes:
709  processes[pid] = StraceProcess(delayed=True)
710 
711  process = processes[pid]
712  if process.delayed:
713  process.add_delayed_line(line)
714  return True
715  else:
716  return False
717 
    def __call__(self, *args, **kwargs):
        """ Run command and return its dependencies and outputs, using strace
            to determine dependencies (by looking at what files are opened or
            modified). """
        ignore_status = kwargs.pop('ignore_status', False)
        if self.keep_temps:
            outname = 'strace%03d.txt' % self.temp_count
            self.temp_count += 1
            handle = os.open(outname, os.O_CREAT)
        else:
            handle, outname = tempfile.mkstemp()

        try:
            try:
                # open read-only: strace itself writes the log via 'outname'
                outfile = os.fdopen(handle, 'r')
            except:
                os.close(handle)
                raise
            try:
                status, deps, outputs = self._do_strace(args, kwargs, outfile, outname)
                if status is None:
                    # _match_line returns early (status None) on "killed by"
                    raise ExecutionError(
                        '%r was killed unexpectedly' % args[0], '', -1)
            finally:
                outfile.close()
        finally:
            if not self.keep_temps:
                os.remove(outname)

        if status and not ignore_status:
            raise ExecutionError('%r exited with status %d'
                                 % (os.path.basename(args[0]), status),
                                 '', status)
        return list(deps), list(outputs)
752 
    def __init__(self, builder):
        # no state needed: this runner can't track dependencies at all
        pass
756 
757  def __call__(self, *args, **kwargs):
758  """ Runner that always runs given command, used as a backup in case
759  a system doesn't have strace or atimes. """
760  shell_keywords = dict(silent=False)
761  shell_keywords.update(kwargs)
762  shell(*args, **shell_keywords)
763  return None, None
764 
766  """ Smart command runner that uses StraceRunner if it can,
767  otherwise AtimesRunner if available, otherwise AlwaysRunner. """
768  def __init__(self, builder):
769  self._builder = builder
770  try:
772  except RunnerUnsupportedException:
773  try:
774  self._runner = AtimesRunner(self._builder)
775  except RunnerUnsupportedException:
776  self._runner = AlwaysRunner(self._builder)
777 
778  def actual_runner(self):
779  return self._runner
780 
781  def __call__(self, *args, **kwargs):
782  return self._runner(*args, **kwargs)
783 
785  """ Represents a task put on the parallel pool
786  and its results when complete """
787  def __init__(self, async, command):
788  """ "async" is the AsyncResult object returned from pool.apply_async
789  "command" is the command that was run """
790  self.async = async
791  self.command = command
792  self.results = None
793 
class _after(object):
    """ Represents something waiting on completion of some previous commands """
    def __init__(self, afters, do):
        """ "afters" is a group id or a iterable of group ids to wait on
            "do" is either a tuple representing a command (group, command,
            arglist, kwargs) or a threading.Condition to be released """
        self.done = False     # set True once the wait is satisfied
        self.afters = afters
        self.do = do
803 
805  """ Thread safe mapping object whose values are lists of _running
806  or _after objects and a count of how many have *not* completed """
807  class value(object):
808  """ the value type in the map """
809  def __init__(self, val=None):
810  self.count = 0 # count of items not yet completed.
811  # This also includes count_in_false number
812  self.count_in_false = 0 # count of commands which is assigned
813  # to False group, but will be moved
814  # to this group.
815  self.items = [] # items in this group
816  if val is not None:
817  self.items.append(val)
818  self.ok = True # True if no error from any command in group so far
819 
820  def __init__(self):
821  self.groups = {False: self.value()}
822  self.lock = threading.Lock()
823 
824  def item_list(self, id):
825  """ Return copy of the value list """
826  with self.lock:
827  return self.groups[id].items[:]
828 
829  def remove(self, id):
830  """ Remove the group """
831  with self.lock:
832  del self.groups[id]
833 
834  def remove_item(self, id, val):
835  with self.lock:
836  self.groups[id].items.remove(val)
837 
838  def add(self, id, val):
839  with self.lock:
840  if id in self.groups:
841  self.groups[id].items.append(val)
842  else:
843  self.groups[id] = self.value(val)
844  self.groups[id].count += 1
845 
846  def ensure(self, id):
847  """if id does not exit, create it without any value"""
848  with self.lock:
849  if not id in self.groups:
850  self.groups[id] = self.value()
851 
852  def get_count(self, id):
853  with self.lock:
854  if id not in self.groups:
855  return 0
856  return self.groups[id].count
857 
858  def dec_count(self, id):
859  with self.lock:
860  c = self.groups[id].count - 1
861  if c < 0:
862  raise ValueError
863  self.groups[id].count = c
864  return c
865 
866  def get_ok(self, id):
867  with self.lock:
868  return self.groups[id].ok
869 
870  def set_ok(self, id, to):
871  with self.lock:
872  self.groups[id].ok = to
873 
874  def ids(self):
875  with self.lock:
876  return self.groups.keys()
877 
878  # modification to reserve blocked commands to corresponding groups
879  def inc_count_for_blocked(self, id):
880  with self.lock:
881  if not id in self.groups:
882  self.groups[id] = self.value()
883  self.groups[id].count += 1
884  self.groups[id].count_in_false += 1
885 
886  def add_for_blocked(self, id, val):
887  # modification of add(), in order to move command from False group
888  # to actual group
889  with self.lock:
890  # id must be registered before
891  self.groups[id].items.append(val)
892  # count does not change (already considered
893  # in inc_count_for_blocked), but decrease count_in_false.
894  c = self.groups[id].count_in_false - 1
895  if c < 0:
896  raise ValueError
897  self.groups[id].count_in_false = c
898 
899 
# pool of processes to run parallel jobs, must not be part of any object that
# is pickled for transfer to these processes, ie it must be global
_pool = None
# object holding results, must also be global
_groups = _Groups()
# results collecting thread
_results = None
# event used to tell the results-collecting thread to shut down
_stop_results = threading.Event()
908 
class _todo(object):
    """ holds the parameters for commands waiting on others """
    def __init__(self, group, command, arglist, kwargs):
        self.group = group      # which group it should run as
        self.command = command  # string command
        self.arglist = arglist  # command arguments
        self.kwargs = kwargs    # keyword args for the runner
916 
def _results_handler( builder, delay=0.01):
    """ Body of thread that stores results in .deps and handles 'after'
        conditions
       "builder" the builder used
       "delay" polling interval in seconds between scans

       Polls every running command's async result; finished commands are
       recorded via builder.done() and their group counts decremented.
       Entries queued under the special group key False are commands (or
       after() waiters) blocked until their 'afters' groups drain.

       NOTE(review): the locals/attributes named 'async' make this module
       Python <= 3.6 only ('async' became a keyword in 3.7). """
    try:
        while not _stop_results.isSet():
            # go through the lists and check any results available
            for id in _groups.ids():
                if id is False: continue # key of False is _afters not _runnings
                for r in _groups.item_list(id):
                    if r.results is None and r.async.ready():
                        try:
                            d, o = r.async.get()
                        except ExecutionError as e:
                            r.results = e
                            _groups.set_ok(id, False)
                            # NOTE(review): unpacking the exception instance
                            # directly is Python 2 semantics; Python 3 needs
                            # e.args (as main() does)
                            message, data, status = e
                            printerr("fabricate: " + message)
                        else:
                            builder.done(r.command, d, o) # save deps
                            r.results = (r.command, d, o)
                        _groups.dec_count(id)
            # check if can now schedule things waiting on the after queue
            for a in _groups.item_list(False):
                still_to_do = sum(_groups.get_count(g) for g in a.afters)
                no_error = all(_groups.get_ok(g) for g in a.afters)
                if False in a.afters:
                    still_to_do -= 1 # don't count yourself of course
                if still_to_do == 0:
                    # all the groups this entry waited on have drained
                    if isinstance(a.do, _todo):
                        # a blocked command: schedule it now (or fail it)
                        if no_error:
                            async = _pool.apply_async(_call_strace, a.do.arglist,
                                        a.do.kwargs)
                            _groups.add_for_blocked(a.do.group, _running(async, a.do.command))
                        else:
                            # Mark the command as not done due to errors
                            r = _running(None, a.do.command)
                            _groups.add_for_blocked(a.do.group, r)
                            r.results = False
                            _groups.set_ok(a.do.group, False)
                            _groups.dec_count(a.do.group)
                    elif isinstance(a.do, threading_condition):
                        # a thread blocked in after(): wake it up
                        # is this only for threading_condition in after()?
                        a.do.acquire()
                        # only mark as done if there is no error
                        a.done = no_error
                        a.do.notify()
                        a.do.release()
                    # else: #are there other cases?
                    _groups.remove_item(False, a)
                    _groups.dec_count(False)

            _stop_results.wait(delay)
    except Exception:
        etype, eval, etb = sys.exc_info()
        printerr("Error: exception " + repr(etype) + " at line " + str(etb.tb_lineno))
        traceback.print_tb(etb)
    finally:
        if not _stop_results.isSet():
            # oh dear, I am about to die for unexplained reasons, stop the whole
            # app otherwise the main thread hangs waiting on non-existant me,
            # Note: sys.exit() only kills me
            printerr("Error: unexpected results handler exit")
            os._exit(1)
981 
983  """ The Builder.
984 
985  You may supply a "runner" class to change the way commands are run
986  or dependencies are determined. For an example, see:
987  https://github.com/SimonAlfie/fabricate/wiki/HowtoMakeYourOwnRunner
988 
989  A "runner" must be a subclass of Runner and must have a __call__()
990  function that takes a command as a list of args and returns a tuple of
991  (deps, outputs), where deps is a list of rel-path'd dependency files
992  and outputs is a list of rel-path'd output files. The default runner
993  is SmartRunner, which automatically picks one of StraceRunner,
994  AtimesRunner, or AlwaysRunner depending on your system.
995  A "runner" class may have an __init__() function that takes the
996  builder as a parameter.
997  """
998 
    def __init__(self, runner=None, dirs=None, dirdepth=100, ignoreprefix='.',
                 ignore=None, hasher=md5_hasher, depsname='.deps',
                 quiet=False, debug=False, inputs_only=False, parallel_ok=False):
        """ Initialise a Builder with the given options.

        "runner" specifies how programs should be run.  It is either a
            callable compatible with the Runner class, or a string selecting
            one of the standard runners ("atimes_runner", "strace_runner",
            "always_runner", or "smart_runner").
        "dirs" is a list of paths to look for dependencies (or outputs) in
            if using the strace or atimes runners.
        "dirdepth" is the depth to recurse into the paths in "dirs" (default
            essentially means infinitely).  Set to 1 to just look at the
            immediate paths in "dirs" and not recurse at all.  This can be
            useful to speed up the AtimesRunner if you're building in a large
            tree and you don't care about all of the subdirectories.
        "ignoreprefix" prevents recursion into directories that start with
            prefix.  It defaults to '.' to ignore svn directories.
            Change it to '_svn' if you use _svn hidden directories.
        "ignore" is a regular expression.  Any dependency that contains a
            regex match is ignored and not put into the dependency list.
            Note that the regex may be VERBOSE (spaces are ignored and # line
            comments allowed -- use \ prefix to insert these characters)
        "hasher" is a function which returns a string which changes when
            the contents of its filename argument changes, or None on error.
            Default is md5_hasher, but can also be mtime_hasher.
        "depsname" is the name of the JSON dependency file to load/save.
        "quiet" set to True tells the builder to not display the commands being
            executed (or other non-error output).
        "debug" set to True makes the builder print debug output, such as why
            particular commands are being executed
        "inputs_only" set to True makes builder only re-build if input hashes
            have changed (ignores output hashes); use with tools that touch
            files that shouldn't cause a rebuild; e.g. g++ collect phase
        "parallel_ok" set to True to indicate script is safe for parallel running
        """
        if dirs is None:
            dirs = ['.']
        self.dirs = dirs
        self.dirdepth = dirdepth
        self.ignoreprefix = ignoreprefix
        if ignore is None:
            ignore = r'$x^' # something that can't match
        self.ignore = re.compile(ignore, re.VERBOSE)
        self.depsname = depsname
        self.hasher = hasher
        self.quiet = quiet
        self.debug = debug
        self.inputs_only = inputs_only
        # True only while outofdate() is probing, suppresses actual running
        self.checking = False
        # filename -> hash memo shared by done()/cmdline_outofdate()
        self.hash_cache = {}

        # instantiate runner after the above have been set in case it needs them
        if runner is not None:
            self.set_runner(runner)
        elif hasattr(self, 'runner'):
            # For backwards compatibility, if a derived class has
            # defined a "runner" method then use it:
            pass
        else:
            self.runner = SmartRunner(self)

        # parallel building is only supported under StraceRunner and when
        # main() created the process pool (jobs > 1)
        is_strace = isinstance(self.runner.actual_runner(), StraceRunner)
        self.parallel_ok = parallel_ok and is_strace and _pool is not None
        if self.parallel_ok:
            global _results
            _results = threading.Thread(target=_results_handler,
                                        args=[self])
            _results.setDaemon(True)
            _results.start()
            atexit.register(self._join_results_handler)
            StraceRunner.keep_temps = False # unsafe for parallel execution
1071 
1072  def echo(self, message):
1073  """ Print message, but only if builder is not in quiet mode. """
1074  if not self.quiet:
1075  print(message)
1076 
1077  def echo_command(self, command, echo=None):
1078  """ Show a command being executed. Also passed run's "echo" arg
1079  so you can override what's displayed.
1080  """
1081  if echo is not None:
1082  command = str(echo)
1083  self.echo(command)
1084 
1085  def echo_delete(self, filename, error=None):
1086  """ Show a file being deleted. For subclassing Builder and overriding
1087  this function, the exception is passed in if an OSError occurs
1088  while deleting a file. """
1089  if error is None:
1090  self.echo('deleting %s' % filename)
1091  else:
1092  self.echo_debug('error deleting %s: %s' % (filename, error.strerror))
1093 
1094  def echo_debug(self, message):
1095  """ Print message, but only if builder is in debug mode. """
1096  if self.debug:
1097  print('DEBUG: ' + message)
1098 
    def _run(self, *args, **kwargs):
        """ Implementation of run(): decide whether the command is out of
            date, and either run it (directly, or via the process pool when
            parallel_ok) or just record the out-of-date flag when checking.

            Returns (command, deps, outputs) when run serially or already
            up to date; returns None when the command was handed to the
            parallel machinery (results arrive via _results_handler). """
        after = kwargs.pop('after', None)
        group = kwargs.pop('group', True)
        echo = kwargs.pop('echo', None)
        arglist = args_to_list(args)
        if not arglist:
            raise TypeError('run() takes at least 1 argument (0 given)')
        # we want a command line string for the .deps file key and for display
        command = subprocess.list2cmdline(arglist)
        if not self.cmdline_outofdate(command):
            if self.parallel_ok:
                # register the group even for skipped commands so after()
                # bookkeeping stays consistent
                _groups.ensure(group)
            return command, None, None

        # if just checking up-to-date-ness, set flag and do nothing more
        self.outofdate_flag = True
        if self.checking:
            if self.parallel_ok:
                _groups.ensure(group)
            return command, None, None

        # use runner to run command and collect dependencies
        self.echo_command(command, echo=echo)
        if self.parallel_ok:
            arglist.insert(0, self.runner)
            if after is not None:
                if not isinstance(after, (list, tuple)):
                    after = [after]
                # This command is registered to False group firstly,
                # but the actual group of this command should
                # count this blocked command as well as usual commands
                _groups.inc_count_for_blocked(group)
                _groups.add(False,
                            _after(after, _todo(group, command, arglist,
                                                kwargs)))
            else:
                # NOTE(review): 'async' is a keyword from Python 3.7, so
                # this parallel path requires an older interpreter
                async = _pool.apply_async(_call_strace, arglist, kwargs)
                _groups.add(group, _running(async, command))
            return None
        else:
            deps, outputs = self.runner(*arglist, **kwargs)
            return self.done(command, deps, outputs)
1141 
1142  def run(self, *args, **kwargs):
1143  """ Run command given in args with kwargs per shell(), but only if its
1144  dependencies or outputs have changed or don't exist. Return tuple
1145  of (command_line, deps_list, outputs_list) so caller or subclass
1146  can use them.
1147 
1148  Parallel operation keyword args "after" specifies a group or
1149  iterable of groups to wait for after they finish, "group" specifies
1150  the group to add this command to.
1151 
1152  Optional "echo" keyword arg is passed to echo_command() so you can
1153  override its output if you want.
1154  """
1155  try:
1156  return self._run(*args, **kwargs)
1157  finally:
1158  sys.stderr.flush()
1159  sys.stdout.flush()
1160 
1161  def done(self, command, deps, outputs):
1162  """ Store the results in the .deps file when they are available """
1163  if deps is not None or outputs is not None:
1164  deps_dict = {}
1165 
1166  # hash the dependency inputs and outputs
1167  for dep in deps:
1168  if dep in self.hash_cache:
1169  # already hashed so don't repeat hashing work
1170  hashed = self.hash_cache[dep]
1171  else:
1172  hashed = self.hasher(dep)
1173  if hashed is not None:
1174  deps_dict[dep] = "input-" + hashed
1175  # store hash in hash cache as it may be a new file
1176  self.hash_cache[dep] = hashed
1177 
1178  for output in outputs:
1179  hashed = self.hasher(output)
1180  if hashed is not None:
1181  deps_dict[output] = "output-" + hashed
1182  # update hash cache as this file should already be in
1183  # there but has probably changed
1184  self.hash_cache[output] = hashed
1185 
1186  self.deps[command] = deps_dict
1187 
1188  return command, deps, outputs
1189 
1190  def memoize(self, command, **kwargs):
1191  """ Run the given command, but only if its dependencies have changed --
1192  like run(), but returns the status code instead of raising an
1193  exception on error. If "command" is a string (as per memoize.py)
1194  it's split into args using shlex.split() in a POSIX/bash style,
1195  otherwise it's a list of args as per run().
1196 
1197  This function is for compatiblity with memoize.py and is
1198  deprecated. Use run() instead. """
1199  if isinstance(command, string_types):
1200  args = shlex.split(command)
1201  else:
1202  args = args_to_list(command)
1203  try:
1204  self.run(args, **kwargs)
1205  return 0
1206  except ExecutionError as exc:
1207  message, data, status = exc
1208  return status
1209 
    def outofdate(self, func):
        """ Return True if given build function is out of date.

        Runs *func* with self.checking set, so _run() only records the
        out-of-date flag instead of actually executing commands. """
        self.checking = True
        self.outofdate_flag = False
        func()
        # NOTE(review): if func() raises, checking stays True -- confirm
        # callers never continue using the builder after that
        self.checking = False
        return self.outofdate_flag
1217 
    def cmdline_outofdate(self, command):
        """ Return True if given command line is out of date.

        False is returned only when the command has been run before and
        every recorded input/output hash still matches (for-else below). """
        if command in self.deps:
            # command has been run before, see if deps have changed
            for dep, oldhash in self.deps[command].items():
                assert oldhash.startswith('input-') or \
                       oldhash.startswith('output-'), \
                    "%s file corrupt, do a clean!" % self.depsname
                io_type, oldhash = oldhash.split('-', 1)

                # make sure this dependency or output hasn't changed
                if dep in self.hash_cache:
                    # already hashed so don't repeat hashing work
                    newhash = self.hash_cache[dep]
                else:
                    # not in hash_cache so make sure this dependency or
                    # output hasn't changed
                    newhash = self.hasher(dep)
                    if newhash is not None:
                        # Add newhash to the hash cache
                        self.hash_cache[dep] = newhash

                if newhash is None:
                    # file no longer exists (or is unreadable) -> rebuild
                    self.echo_debug("rebuilding %r, %s %s doesn't exist" %
                                    (command, io_type, dep))
                    break
                if newhash != oldhash and (not self.inputs_only or io_type == 'input'):
                    # changed hash; with inputs_only, output changes are ignored
                    self.echo_debug("rebuilding %r, hash for %s %s (%s) != old hash (%s)" %
                                    (command, io_type, dep, newhash, oldhash))
                    break
            else:
                # all dependencies are unchanged
                return False
        else:
            self.echo_debug('rebuilding %r, no dependency data' % command)
        # command has never been run, or one of the dependencies didn't
        # exist or had changed
        return True
1256 
1257  def autoclean(self):
1258  """ Automatically delete all outputs of this build as well as the .deps
1259  file. """
1260  # first build a list of all the outputs from the .deps file
1261  outputs = []
1262  dirs = []
1263  for command, deps in self.deps.items():
1264  outputs.extend(dep for dep, hashed in deps.items()
1265  if hashed.startswith('output-'))
1266  outputs.append(self.depsname)
1267  self._deps = None
1268  for output in outputs:
1269  try:
1270  os.remove(output)
1271  except OSError as e:
1272  if os.path.isdir(output):
1273  # cache directories to be removed once all other outputs
1274  # have been removed, as they may be content of the dir
1275  dirs.append(output)
1276  else:
1277  self.echo_delete(output, e)
1278  else:
1279  self.echo_delete(output)
1280  # delete the directories in reverse sort order
1281  # this ensures that parents are removed after children
1282  for dir in sorted(dirs, reverse=True):
1283  try:
1284  os.rmdir(dir)
1285  except OSError as e:
1286  self.echo_delete(dir, e)
1287  else:
1288  self.echo_delete(dir)
1289 
1290 
    @property
    def deps(self):
        """ Lazy load .deps file so that instantiating a Builder is "safe".

        First access reads the file and registers the atexit writer. """
        if not hasattr(self, '_deps') or self._deps is None:
            self.read_deps()
            # pin the absolute path now: atexit may fire after a chdir
            atexit.register(self.write_deps, depsname=os.path.abspath(self.depsname))
        return self._deps
1298 
1299  def read_deps(self):
1300  """ Read dependency JSON file into deps object. """
1301  try:
1302  f = open(self.depsname)
1303  try:
1304  self._deps = json.load(f)
1305  # make sure the version is correct
1306  if self._deps.get('.deps_version', 0) != deps_version:
1307  printerr('Bad %s dependency file version! Rebuilding.'
1308  % self.depsname)
1309  self._deps = {}
1310  self._deps.pop('.deps_version', None)
1311  finally:
1312  f.close()
1313  except IOError:
1314  self._deps = {}
1315 
1316  def write_deps(self, depsname=None):
1317  """ Write out deps object into JSON dependency file. """
1318  if self._deps is None:
1319  return # we've cleaned so nothing to save
1320  self.deps['.deps_version'] = deps_version
1321  if depsname is None:
1322  depsname = self.depsname
1323  f = open(depsname, 'w')
1324  try:
1325  json.dump(self.deps, f, indent=4, sort_keys=True)
1326  finally:
1327  f.close()
1328  self._deps.pop('.deps_version', None)
1329 
    # maps the string names accepted by __init__/set_runner to the
    # corresponding Runner classes
    _runner_map = {
        'atimes_runner' : AtimesRunner,
        'strace_runner' : StraceRunner,
        'always_runner' : AlwaysRunner,
        'smart_runner' : SmartRunner,
        }
1336 
    def set_runner(self, runner):
        """Set the runner for this builder. "runner" is either a Runner
        subclass (e.g. SmartRunner), or a string selecting one of the
        standard runners ("atimes_runner", "strace_runner",
        "always_runner", or "smart_runner")."""
        try:
            # string name of a standard runner -> instantiate it
            self.runner = self._runner_map[runner](self)
        except KeyError:
            if isinstance(runner, string_types):
                # For backwards compatibility, allow runner to be the
                # name of a method in a derived class:
                self.runner = getattr(self, runner)
            else:
                # pass builder to runner class to get a runner instance
                self.runner = runner(self)
1352 
1353  def _is_relevant(self, fullname):
1354  """ Return True if file is in the dependency search directories. """
1355 
1356  # need to abspath to compare rel paths with abs
1357  fullname = os.path.abspath(fullname)
1358  for path in self.dirs:
1359  path = os.path.abspath(path)
1360  if fullname.startswith(path):
1361  rest = fullname[len(path):]
1362  # files in dirs starting with ignoreprefix are not relevant
1363  if os.sep+self.ignoreprefix in os.sep+os.path.dirname(rest):
1364  continue
1365  # files deeper than dirdepth are not relevant
1366  if rest.count(os.sep) > self.dirdepth:
1367  continue
1368  return True
1369  return False
1370 
1372  """Stops then joins the results handler thread"""
1373  _stop_results.set()
1374  _results.join()
1375 
# default Builder instance, used by helper run() and main() helper functions
default_builder = None
# name of the user build-script function run when none is given
default_command = 'build'

# save the setup() arguments for use by main()
_setup_builder = None
_setup_default = None
_setup_kwargs = {}
1384 
def setup(builder=None, default=None, **kwargs):
    """ NOTE: setup functionality is now in main(); setup() remains only
        for backward compatibility and should not be used in new scripts.

        Records the Builder class ("builder", the default Builder when
        None), the name of the default function to run when the build
        script gets no command-line arguments ("default"), and any
        Builder() keyword arguments, all for later use by main(). """
    global _setup_builder, _setup_default, _setup_kwargs
    _setup_builder = builder
    _setup_default = default
    _setup_kwargs = kwargs
# append Builder.__init__'s docstring so help(setup) documents the
# keyword arguments that setup() forwards to the Builder
setup.__doc__ += '\n\n' + Builder.__init__.__doc__
1398 
1400  """ Set default builder to Builder() instance if it's not yet set. """
1401  global default_builder
1402  if default_builder is None:
1403  default_builder = Builder()
1404 
def run(*args, **kwargs):
    """ Run the given command, but only if its dependencies have changed.
        Uses the default Builder.  Return value as per Builder.run().  If
        there is only one positional argument which is an iterable, treat
        each element as a command and return a list of Builder.run()
        results. """
    # restored: this call was dropped from the listing (doxygen anchor);
    # without it default_builder may still be None here
    _set_default_builder()
    if len(args) == 1 and isinstance(args[0], (list, tuple)):
        return [default_builder.run(*a, **kwargs) for a in args[0]]
    return default_builder.run(*args, **kwargs)
1415 
def after(*args):
    """ Wait until after the specified command groups complete and return
        their results, or None if not running in parallel mode.  With no
        arguments, waits on all groups. """
    # restored: this call was dropped from the listing (doxygen anchor)
    _set_default_builder()
    if getattr(default_builder, 'parallel_ok', False):
        if len(args) == 0:
            args = _groups.ids()    # wait on all
        # block this thread on a condition serviced by _results_handler
        cond = threading.Condition()
        cond.acquire()
        waiter = _after(args, cond)
        _groups.add(False, waiter)
        cond.wait()
        if not waiter.done:
            # one of the awaited groups failed
            sys.exit(1)
        results = []
        ids = _groups.ids()
        for group in args:
            if group in ids and group is not False:
                group_results = [item.results
                                 for item in _groups.item_list(group)]
                results.append((group, group_results))
        return results
    else:
        return None
1441 
1443  """ Automatically delete all outputs of the default build. """
1445  default_builder.autoclean()
1446 
def memoize(command, **kwargs):
    # deprecated memoize.py-compatible helper; docstring copied from
    # Builder.memoize below.  The _set_default_builder() call was dropped
    # from this listing (doxygen anchor) and is restored here.
    _set_default_builder()
    return default_builder.memoize(command, **kwargs)

memoize.__doc__ = Builder.memoize.__doc__
1452 
def outofdate(command):
    """ Return True if given command is out of date and needs to be run. """
    # restored: this call was dropped from the listing (doxygen anchor)
    _set_default_builder()
    return default_builder.outofdate(command)
1457 
# (parser, options, args) saved for use by main() if parse_options was
# called earlier by the user script
_parsed_options = None

# default usage message
_usage = '[options] build script functions to run'
1463 
def parse_options(usage=_usage, extra_options=None, command_line=None):
    """ Parse command line options and return (parser, options, args).

    "extra_options" is an optional list of optparse options to add;
    "command_line" overrides sys.argv when given.  The result is also
    cached in the module-global _parsed_options for use by main(). """
    global _parsed_options
    parser = optparse.OptionParser(usage='Usage: %prog ' + usage,
                                   version='%prog ' + __version__)
    parser.disable_interspersed_args()
    standard_options = [
        ('-t', '--time', dict(action='store_true',
             help='use file modification times instead of MD5 sums')),
        ('-d', '--dir', dict(action='append',
             help='add DIR to list of relevant directories')),
        ('-c', '--clean', dict(action='store_true',
             help='autoclean build outputs before running')),
        ('-q', '--quiet', dict(action='store_true',
             help="don't echo commands, only print errors")),
        ('-D', '--debug', dict(action='store_true',
             help="show debug info (why commands are rebuilt)")),
        ('-k', '--keep', dict(action='store_true',
             help='keep temporary strace output files')),
        ('-j', '--jobs', dict(type='int',
             help='maximum number of parallel jobs')),
    ]
    for short_opt, long_opt, opt_kwargs in standard_options:
        parser.add_option(short_opt, long_opt, **opt_kwargs)
    if extra_options:
        # add any user-specified options passed in via main()
        for option in extra_options:
            parser.add_option(option)
    if command_line is None:
        options, args = parser.parse_args()
    else:
        options, args = parser.parse_args(command_line)
    _parsed_options = (parser, options, args)
    return _parsed_options
1494 
def fabricate_version(min=None, max=None):
    """ If min is given, assert that the running fabricate is at least that
        version or exit with an error message.  If max is given, assert that
        the running fabricate is at most that version.  Return the current
        fabricate version string.  This function was introduced in v1.14;
        for prior versions, the version string is available only as module
        local string fabricate.__version__ """
    # float(__version__) raises ValueError on three-part versions such as
    # this file's own '1.29.3', so compare on the major.minor prefix only
    version = float('.'.join(__version__.split('.')[:2]))
    if min is not None and version < min:
        sys.stderr.write(("fabricate is version %s. This build script "
            "requires at least version %.2f") % (__version__, min))
        sys.exit()
    if max is not None and version > max:
        sys.stderr.write(("fabricate is version %s. This build script "
            "requires at most version %.2f") % (__version__, max))
        sys.exit()
    return __version__
1512 
def main(globals_dict=None, build_dir=None, extra_options=None, builder=None,
         default=None, jobs=1, command_line=None, **kwargs):
    """ Run the default function or the function(s) named in the command line
        arguments. Call this at the end of your build script. If one of the
        functions returns nonzero, main will exit with the last nonzero return
        value as its status code.

        "builder" is the class of builder to create, default (None) is the
        normal builder
        "command_line" is an optional list of command line arguments that can
        be used to prevent the default parsing of sys.argv. Used to intercept
        and modify the command line passed to the build script.
        "default" is the default user script function to call, None = 'build'
        "extra_options" is an optional list of options created with
        optparse.make_option(). The pseudo-global variable main.options
        is set to the parsed options list.
        "kwargs" is any other keyword arguments to pass to the builder """
    global default_builder, default_command, _pool

    kwargs.update(_setup_kwargs)
    # reuse options from an earlier explicit parse_options() call unless a
    # fresh command_line was supplied
    if _parsed_options is not None and command_line is None:
        parser, options, actions = _parsed_options
    else:
        parser, options, actions = parse_options(extra_options=extra_options, command_line=command_line)
    kwargs['quiet'] = options.quiet
    kwargs['debug'] = options.debug
    if options.time:
        kwargs['hasher'] = mtime_hasher
    if options.dir:
        kwargs['dirs'] = options.dir
    if options.keep:
        StraceRunner.keep_temps = options.keep
    # expose parsed options to the build script as main.options
    main.options = options
    if options.jobs is not None:
        jobs = options.jobs
    if default is not None:
        default_command = default
    if default_command is None:
        default_command = _setup_default
    if not actions:
        actions = [default_command]

    original_path = os.getcwd()
    # infer globals_dict/build_dir from the caller's frame when not given
    if None in [globals_dict, build_dir]:
        try:
            frame = sys._getframe(1)
        except:
            printerr("Your Python version doesn't support sys._getframe(1),")
            printerr("call main(globals(), build_dir) explicitly")
            sys.exit(1)
        if globals_dict is None:
            globals_dict = frame.f_globals
        if build_dir is None:
            build_file = frame.f_globals.get('__file__', None)
            if build_file:
                build_dir = os.path.dirname(build_file)
    if build_dir:
        if not options.quiet and os.path.abspath(build_dir) != original_path:
            print("Entering directory '%s'" % build_dir)
        os.chdir(build_dir)
    # create the process pool before the Builder so parallel_ok can see it
    if _pool is None and jobs > 1:
        _pool = multiprocessing.Pool(jobs)

    # builder precedence: main(builder=...) > setup(builder=...) > Builder
    use_builder = Builder
    if _setup_builder is not None:
        use_builder = _setup_builder
    if builder is not None:
        use_builder = builder
    default_builder = use_builder(**kwargs)

    if options.clean:
        default_builder.autoclean()

    status = 0
    try:
        for action in actions:
            # allow both "target" and "target(args)" command forms
            if '(' not in action:
                action = action.strip() + '()'
            name = action.split('(')[0].split('.')[0]
            if name in globals_dict:
                this_status = eval(action, globals_dict)
                if this_status:
                    status = int(this_status)
            else:
                printerr('%r command not defined!' % action)
                sys.exit(1)
        after() # wait till the build commands are finished
    except ExecutionError as exc:
        message, data, status = exc.args
        printerr('fabricate: ' + message)
    finally:
        _stop_results.set() # stop the results gatherer so I don't hang
        # NOTE(review): if build_dir was never determined (stays None),
        # os.path.abspath(build_dir) here raises TypeError -- confirm
        # every supported caller path sets build_dir
        if not options.quiet and os.path.abspath(build_dir) != original_path:
            print("Leaving directory '%s' back to '%s'" % (build_dir, original_path))
        os.chdir(original_path)
    sys.exit(status)
1609 
if __name__ == '__main__':
    # if called as a script, emulate memoize.py -- run() command line
    parser, options, args = parse_options('[options] command line to run')
    status = 0
    if args:
        status = memoize(args)
    elif not options.clean:
        # no command given and not just cleaning: show usage and fail
        parser.print_help()
        status = 1
    # autoclean may have been used
    sys.exit(status)
def _results_handler(builder, delay=0.01)
Definition: fabricate.py:917
void split(double tt, double *fr)
def file_has_atimes(filename)
Definition: fabricate.py:275
def echo(self, message)
Definition: fabricate.py:1072
def memoize(self, command, kwargs)
Definition: fabricate.py:1190
def get_count(self, id)
Definition: fabricate.py:852
def inc_count_for_blocked(self, id)
Definition: fabricate.py:879
def outofdate(command)
Definition: fabricate.py:1453
def remove_item(self, id, val)
Definition: fabricate.py:834
def echo_command(self, command, echo=None)
Definition: fabricate.py:1077
def md5_hasher(filename)
Definition: fabricate.py:211
def after(args)
Definition: fabricate.py:1416
def done(self, command, deps, outputs)
Definition: fabricate.py:1161
def printerr(message)
Definition: fabricate.py:118
def __init__(self, runner=None, dirs=None, dirdepth=100, ignoreprefix='.', ignore=None, hasher=md5_hasher, depsname='.deps', quiet=False, debug=False, inputs_only=False, parallel_ok=False)
Definition: fabricate.py:1001
def _call_strace(self, args, kwargs)
Definition: fabricate.py:499
def memoize(command, kwargs)
Definition: fabricate.py:1447
def read_deps(self)
Definition: fabricate.py:1299
def has_atimes(paths)
Definition: fabricate.py:333
dictionary _runner_map
Definition: fabricate.py:1330
def __init__(self, cwd='.', delayed=False)
Definition: fabricate.py:479
def add(self, id, val)
Definition: fabricate.py:838
def remove(self, id)
Definition: fabricate.py:829
def _run(self, args, kwargs)
Definition: fabricate.py:1099
def add_dep(self, dep)
Definition: fabricate.py:486
def echo_delete(self, filename, error=None)
Definition: fabricate.py:1085
def cmdline_outofdate(self, command)
Definition: fabricate.py:1218
def __call__(self, args, kwargs)
Definition: fabricate.py:251
def __init__(self, afters, do)
Definition: fabricate.py:796
def autoclean(self)
Definition: fabricate.py:1257
def __init__(self)
Definition: fabricate.py:820
def shell(args, kwargs)
Definition: fabricate.py:143
def setup(builder=None, default=None, kwargs)
Definition: fabricate.py:1385
def _set_default_builder()
Definition: fabricate.py:1399
Definition: novas.h:112
def parse_options(usage=_usage, extra_options=None, command_line=None)
Definition: fabricate.py:1464
def deps(self)
Definition: fabricate.py:1292
def dec_count(self, id)
Definition: fabricate.py:858
def add_delayed_line(self, line)
Definition: fabricate.py:492
def __init__(self, async, command)
Definition: fabricate.py:787
def set_ok(self, id, to)
Definition: fabricate.py:870
def __call__(self, args, kwargs)
Definition: fabricate.py:781
def load(self, f)
Definition: fabricate.py:112
def __init__(self, builder)
Definition: fabricate.py:754
def ignore(self, name)
Definition: fabricate.py:263
def set_runner(self, runner)
Definition: fabricate.py:1337
def _age_atimes(self, filetimes)
Definition: fabricate.py:408
def __init__(self, builder, build_dir=None)
Definition: fabricate.py:506
def mtime_hasher(filename)
Definition: fabricate.py:237
bool print
def __call__(self, args, kwargs)
Definition: fabricate.py:718
def __init__(self, group, command, arglist, kwargs)
Definition: fabricate.py:911
def add_for_blocked(self, id, val)
Definition: fabricate.py:886
double func(double x, double y)
def ids(self)
Definition: fabricate.py:874
def _join_results_handler(self)
Definition: fabricate.py:1371
def write_deps(self, depsname=None)
Definition: fabricate.py:1316
procfile open("FD_BRL_v0.txt")
def actual_runner(self)
Definition: fabricate.py:778
static float min(const float a, const float b, const float c)
Definition: absgeo.cxx:45
def _do_strace(self, args, kwargs, outfile, outname)
Definition: fabricate.py:556
cet::coded_exception< errors::ErrorCodes, ExceptionDetail::translate > Exception
Definition: Exception.h:66
def dump(self, obj, f, indent=None, sort_keys=None)
Definition: fabricate.py:114
def fabricate_version(min=None, max=None)
Definition: fabricate.py:1495
def _is_relevant(self, fullname)
Definition: fabricate.py:1353
def _matching_is_delayed(self, processes, pid, line)
Definition: fabricate.py:706
def echo_debug(self, message)
Definition: fabricate.py:1094
def _file_times(self, path, depth)
Definition: fabricate.py:361
def ensure(self, id)
Definition: fabricate.py:846
def _utime(self, filename, atime, mtime)
Definition: fabricate.py:398
def get_ok(self, id)
Definition: fabricate.py:866
def add_output(self, output)
Definition: fabricate.py:489
def autoclean()
Definition: fabricate.py:1442
def __getattr__(self, name)
Definition: fabricate.py:49
def __call__(self, args, kwargs)
Definition: fabricate.py:421
def outofdate(self, func)
Definition: fabricate.py:1210
def __init__(self, val=None)
Definition: fabricate.py:809
def _shell(args, input=None, silent=True, shell=False, ignore_status=False, kwargs)
Definition: fabricate.py:176
def args_to_list(args)
Definition: fabricate.py:129
def run(self, args, kwargs)
Definition: fabricate.py:1142
def run(args, kwargs)
Definition: fabricate.py:1405
Double_t sum
Definition: plot.C:31
def __call__(self, args, kwargs)
Definition: fabricate.py:757
def __init__(self, builder)
Definition: fabricate.py:768
def __init__(self, builder)
Definition: fabricate.py:267
def item_list(self, id)
Definition: fabricate.py:824
def main(globals_dict=None, build_dir=None, extra_options=None, builder=None, default=None, jobs=1, command_line=None, kwargs)
Definition: fabricate.py:1514
def actual_runner(self)
Definition: fabricate.py:259
def _match_line(self, line, processes, unfinished)
Definition: fabricate.py:590