Package SCons :: Package Node :: Module FS
[hide private]
[frames] | [no frames]

Source Code for Module SCons.Node.FS

   1  """scons.Node.FS 
   2   
   3  File system nodes. 
   4   
   5  These Nodes represent the canonical external objects that people think 
   6  of when they think of building software: files and directories. 
   7   
   8  This holds a "default_fs" variable that should be initialized with an FS 
   9  that can be used by scripts or modules looking for the canonical default. 
  10   
  11  """ 
  12   
  13  # 
  14  # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation 
  15  # 
  16  # Permission is hereby granted, free of charge, to any person obtaining 
  17  # a copy of this software and associated documentation files (the 
  18  # "Software"), to deal in the Software without restriction, including 
  19  # without limitation the rights to use, copy, modify, merge, publish, 
  20  # distribute, sublicense, and/or sell copies of the Software, and to 
  21  # permit persons to whom the Software is furnished to do so, subject to 
  22  # the following conditions: 
  23  # 
  24  # The above copyright notice and this permission notice shall be included 
  25  # in all copies or substantial portions of the Software. 
  26  # 
  27  # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY 
  28  # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 
  29  # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
  30  # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 
  31  # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 
  32  # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 
  33  # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
  34  # 
  35   
  36  __revision__ = "src/engine/SCons/Node/FS.py 5110 2010/07/25 16:14:38 bdeegan" 
  37   
  38  from itertools import izip 
  39  import cStringIO 
  40  import fnmatch 
  41  import os 
  42  import os.path 
  43  import re 
  44  import shutil 
  45  import stat 
  46  import string 
  47  import sys 
  48  import time 
  49   
try:
    import codecs
except ImportError:
    pass
else:
    # TODO(2.2):  Remove when 2.3 becomes the minimal supported version.
    # Older codecs modules may lack the BOM constants; backfill them.
    try:
        codecs.BOM_UTF8
    except AttributeError:
        codecs.BOM_UTF8 = '\xef\xbb\xbf'
    try:
        codecs.BOM_UTF16_LE
        codecs.BOM_UTF16_BE
    except AttributeError:
        codecs.BOM_UTF16_LE = '\xff\xfe'
        codecs.BOM_UTF16_BE = '\xfe\xff'

    # Provide a wrapper function to handle decoding differences in
    # different versions of Python.  Normally, we'd try to do this in the
    # compat layer (and maybe it still makes sense to move there?) but
    # that doesn't provide a way to supply the string class used in
    # pre-2.3 Python versions with a .decode() method that all strings
    # naturally have.  Plus, the 2.[01] encodings behave differently
    # enough that we have to settle for a lowest-common-denominator
    # wrapper approach.
    #
    # Note that the 2.[012] implementations below may be inefficient
    # because they perform an explicit look up of the encoding for every
    # decode, but they're old enough (and we want to stop supporting
    # them soon enough) that it's not worth complicating the interface.
    # Think of it as additional incentive for people to upgrade...
    try:
        ''.decode
    except AttributeError:
        # 2.0 through 2.2:  strings have no .decode() method
        try:
            codecs.lookup('ascii').decode
        except AttributeError:
            # 2.0 and 2.1:  encodings are a tuple of functions, and the
            # decode() function returns a (result, length) tuple.
            def my_decode(contents, encoding):
                return codecs.lookup(encoding)[1](contents)[0]
        else:
            # 2.2:  encodings are an object with methods, and the
            # .decode() method returns just the decoded bytes.
            def my_decode(contents, encoding):
                return codecs.lookup(encoding).decode(contents)
    else:
        # 2.3 or later:  use the .decode() string method
        def my_decode(contents, encoding):
            return contents.decode(encoding)
101 102 import SCons.Action 103 from SCons.Debug import logInstanceCreation 104 import SCons.Errors 105 import SCons.Memoize 106 import SCons.Node 107 import SCons.Node.Alias 108 import SCons.Subst 109 import SCons.Util 110 import SCons.Warnings 111 112 from SCons.Debug import Trace 113 114 do_store_info = True 115 116
class EntryProxyAttributeError(AttributeError):
    """AttributeError that remembers which EntryProxy lookup failed.

    Stores the proxy and the attribute name so that __str__() can
    report the class and name of the underlying Entry involved.
    """
    def __init__(self, entry_proxy, attribute):
        AttributeError.__init__(self)
        self.entry_proxy = entry_proxy
        self.attribute = attribute
    def __str__(self):
        entry = self.entry_proxy.get()
        return "%s instance %s has no attribute %s" % (entry.__class__.__name__,
                                                       repr(entry.name),
                                                       repr(self.attribute))
132 133 # The max_drift value: by default, use a cached signature value for 134 # any file that's been untouched for more than two days. 135 default_max_drift = 2*24*60*60 136 137 # 138 # We stringify these file system Nodes a lot. Turning a file system Node 139 # into a string is non-trivial, because the final string representation 140 # can depend on a lot of factors: whether it's a derived target or not, 141 # whether it's linked to a repository or source directory, and whether 142 # there's duplication going on. The normal technique for optimizing 143 # calculations like this is to memoize (cache) the string value, so you 144 # only have to do the calculation once. 145 # 146 # A number of the above factors, however, can be set after we've already 147 # been asked to return a string for a Node, because a Repository() or 148 # VariantDir() call or the like may not occur until later in SConscript 149 # files. So this variable controls whether we bother trying to save 150 # string values for Nodes. The wrapper interface can set this whenever 151 # they're done mucking with Repository and VariantDir and the other stuff, 152 # to let this module know it can start returning saved string values 153 # for Nodes. 154 # 155 Save_Strings = None 156
def save_strings(val):
    """Enable (true) or disable (false) memoizing of Node string
    representations; flipped on by the wrapper interface once
    Repository()/VariantDir() processing is complete."""
    global Save_Strings
    Save_Strings = val
160 161 # 162 # Avoid unnecessary function calls by recording a Boolean value that 163 # tells us whether or not os.path.splitdrive() actually does anything 164 # on this system, and therefore whether we need to bother calling it 165 # when looking up path names in various methods below. 166 # 167 168 do_splitdrive = None 169
def initialize_do_splitdrive():
    """Record (in the module-level do_splitdrive flag) whether
    os.path.splitdrive() actually does anything on this platform,
    so later path lookups can skip the call when it is a no-op."""
    global do_splitdrive
    drive = os.path.splitdrive('X:/foo')[0]
    # double-negation coerces to a boolean value (pre-bool() idiom)
    do_splitdrive = not not drive
174 175 initialize_do_splitdrive() 176 177 # 178 179 needs_normpath_check = None 180
def initialize_normpath_check():
    """
    Initialize the normpath_check regular expression.

    This function is used by the unit tests to re-initialize the pattern
    when testing for behavior with different values of os.sep.
    """
    global needs_normpath_check
    if os.sep == '/':
        needs_normpath_check = re.compile(r'.*/|\.$|\.\.$')
    else:
        # Also treat the platform separator as a "needs normalization"
        # marker, escaping it for safe use inside a character class.
        needs_normpath_check = re.compile(r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep))
194 195 initialize_normpath_check() 196 197 # 198 # SCons.Action objects for interacting with the outside world. 199 # 200 # The Node.FS methods in this module should use these actions to 201 # create and/or remove files and directories; they should *not* use 202 # os.{link,symlink,unlink,mkdir}(), etc., directly. 203 # 204 # Using these SCons.Action objects ensures that descriptions of these 205 # external activities are properly displayed, that the displays are 206 # suppressed when the -s (silent) option is used, and (most importantly) 207 # the actions are disabled when the the -n option is used, in which case 208 # there should be *no* changes to the external file system(s)... 209 # 210 211 if hasattr(os, 'link'): 224 else: 225 _hardlink_func = None 226 227 if hasattr(os, 'symlink'): 230 else: 231 _softlink_func = None 232
233 -def _copy_func(fs, src, dest):
234 shutil.copy2(src, dest) 235 st = fs.stat(src) 236 fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
237 238 239 Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy', 240 'hard-copy', 'soft-copy', 'copy'] 241 242 Link_Funcs = [] # contains the callables of the specified duplication style 243
def set_duplicate(duplicate):
    """Configure the module-level Link_Funcs list from a duplication
    style name such as 'hard-soft-copy' (one of Valid_Duplicates),
    discarding mechanisms unavailable on this platform."""
    # Map the style component names to the underlying implementations.
    # We do this inside this function, not in the top-level module
    # code, so that we can remap os.link and os.symlink for testing
    # purposes.
    link_dict = {
        'hard' : _hardlink_func,
        'soft' : _softlink_func,
        'copy' : _copy_func,
    }

    if duplicate not in Valid_Duplicates:
        raise SCons.Errors.InternalError("The argument of set_duplicate "
                                         "should be in Valid_Duplicates")
    global Link_Funcs
    # Keep only the mechanisms that exist on this platform (the
    # platform-dependent entries may be None).
    Link_Funcs = [link_dict[name]
                  for name in duplicate.split('-')
                  if link_dict[name]]
266
def LinkFunc(target, source, env):
    """Duplicate source[0] to target[0] using the configured Link_Funcs
    mechanisms in order, creating the target's directory if needed.
    Falls back to the next mechanism on IOError/OSError; only a failure
    of the last one propagates."""
    # Relative paths cause problems with symbolic links, so
    # we use absolute paths, which may be a problem for people
    # who want to move their soft-linked src-trees around. Those
    # people should use the 'hard-copy' mode, softlinks cannot be
    # used for that; at least I have no idea how ...
    src = source[0].abspath
    dest = target[0].abspath
    dir, file = os.path.split(dest)
    if dir and not target[0].fs.isdir(dir):
        os.makedirs(dir)
    if not Link_Funcs:
        # Set a default order of link functions.
        set_duplicate('hard-soft-copy')
    fs = source[0].fs
    # Now link the files with the previously specified order.
    for func in Link_Funcs:
        try:
            func(fs, src, dest)
            break
        except (IOError, OSError):
            # An OSError indicates something happened like a permissions
            # problem or an attempt to symlink across file-system
            # boundaries. An IOError indicates something like the file
            # not existing. In either case, keeping trying additional
            # functions in the list and only raise an error if the last
            # one failed.
            if func == Link_Funcs[-1]:
                # exception of the last link method (copy) are fatal
                raise
    return 0
298 299 Link = SCons.Action.Action(LinkFunc, None)
def LocalString(target, source, env):
    """Return the display message used when LocalCopy duplicates a
    repository file into the local tree."""
    t, s = target[0], source[0]
    return 'Local copy of %s from %s' % (t, s)
302 303 LocalCopy = SCons.Action.Action(LinkFunc, LocalString) 304
def UnlinkFunc(target, source, env):
    """Remove target[0] from disk via its FS abstraction; return 0."""
    node = target[0]
    node.fs.unlink(node.abspath)
    return 0
309 310 Unlink = SCons.Action.Action(UnlinkFunc, None) 311
def MkdirFunc(target, source, env):
    """Create the directory for target[0] if it does not already
    exist; return 0."""
    node = target[0]
    if not node.exists():
        node.fs.mkdir(node.abspath)
    return 0
317 318 Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None) 319 320 MkdirBuilder = None 321
def get_MkdirBuilder():
    """Return the singleton Builder used to create directory nodes,
    constructing and caching it on first use."""
    global MkdirBuilder
    if MkdirBuilder is None:
        import SCons.Builder
        import SCons.Defaults
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        MkdirBuilder = SCons.Builder.Builder(action = Mkdir,
                                             env = None,
                                             explain = None,
                                             is_explicit = None,
                                             target_scanner = SCons.Defaults.DirEntryScanner,
                                             name = "MkdirBuilder")
    return MkdirBuilder
336
class _Null:
    # Unique placeholder type: the module-level _null instance marks
    # "no value supplied" in places where None may be a legitimate value.
    pass
339 340 _null = _Null() 341 342 DefaultSCCSBuilder = None 343 DefaultRCSBuilder = None 344
def get_DefaultSCCSBuilder():
    """Return the singleton Builder that checks sources out of SCCS,
    constructing and caching it on first use."""
    global DefaultSCCSBuilder
    if DefaultSCCSBuilder is None:
        import SCons.Builder
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
        DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
                                                   env = None,
                                                   name = "DefaultSCCSBuilder")
    return DefaultSCCSBuilder
356
def get_DefaultRCSBuilder():
    """Return the singleton Builder that checks sources out of RCS,
    constructing and caching it on first use."""
    global DefaultRCSBuilder
    if DefaultRCSBuilder is None:
        import SCons.Builder
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
        DefaultRCSBuilder = SCons.Builder.Builder(action = act,
                                                  env = None,
                                                  name = "DefaultRCSBuilder")
    return DefaultRCSBuilder
368 369 # Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem. 370 _is_cygwin = sys.platform == "cygwin" 371 if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
372 - def _my_normcase(x):
373 return x
374 else:
375 - def _my_normcase(x):
376 return string.upper(x)
377 378 379
class DiskChecker:
    # A switchable on-disk consistency check.  set_do()/set_ignore()
    # install either the real check or a no-op as the instance's
    # __call__ attribute.  NOTE(review): dispatching a call through an
    # instance attribute named __call__ only works for old-style
    # (Python 2) classes; new-style classes look special methods up on
    # the type.
    def __init__(self, type, do, ignore):
        self.type = type        # name used to enable this check via set()
        self.do = do            # callable performing the real check
        self.ignore = ignore    # callable that skips the check
        self.set_do()           # checks start out enabled
    def set_do(self):
        # Enable: route calls to the real check.
        self.__call__ = self.do
    def set_ignore(self):
        # Disable: route calls to the no-op.
        self.__call__ = self.ignore
    def set(self, list):
        # Enable this checker only if its type name appears in list.
        if self.type in list:
            self.set_do()
        else:
            self.set_ignore()
395
def do_diskcheck_match(node, predicate, errorfmt):
    """Evaluate an on-disk sanity predicate for node and raise
    TypeError (formatted with node.abspath) if it trips."""
    result = predicate()
    # If calling the predicate() cached a None value from stat(),
    # remove it so it doesn't interfere with later attempts to
    # build this Node as we walk the DAG.  (This isn't a great way
    # to do this, we're reaching into an interface that doesn't
    # really belong to us, but it's all about performance, so
    # for now we'll just document the dependency...)
    try:
        if node._memo['stat'] is None:
            del node._memo['stat']
    except (AttributeError, KeyError):
        pass
    if result:
        raise TypeError(errorfmt % node.abspath)
411
def ignore_diskcheck_match(node, predicate, errorfmt):
    # No-op counterpart of do_diskcheck_match(), installed when the
    # 'match' disk check is disabled.
    pass
414
def do_diskcheck_rcs(node, name):
    """Report whether an RCS archive (name + ',v') exists for name in
    node's RCS subdirectory; caches that subdirectory (or None) on the
    node as node.rcs_dir."""
    _missing = []
    rcs_dir = getattr(node, 'rcs_dir', _missing)
    if rcs_dir is _missing:
        # First query on this node: locate and cache the RCS directory.
        if node.entry_exists_on_disk('RCS'):
            rcs_dir = node.Dir('RCS')
        else:
            rcs_dir = None
        node.rcs_dir = rcs_dir
    if rcs_dir:
        return rcs_dir.entry_exists_on_disk(name+',v')
    return None
427
def ignore_diskcheck_rcs(node, name):
    # No-op counterpart of do_diskcheck_rcs(), installed when the
    # 'rcs' disk check is disabled.
    return None
430
def do_diskcheck_sccs(node, name):
    """Report whether an SCCS archive ('s.' + name) exists for name in
    node's SCCS subdirectory; caches that subdirectory (or None) on the
    node as node.sccs_dir."""
    _missing = []
    sccs_dir = getattr(node, 'sccs_dir', _missing)
    if sccs_dir is _missing:
        # First query on this node: locate and cache the SCCS directory.
        if node.entry_exists_on_disk('SCCS'):
            sccs_dir = node.Dir('SCCS')
        else:
            sccs_dir = None
        node.sccs_dir = sccs_dir
    if sccs_dir:
        return sccs_dir.entry_exists_on_disk('s.'+name)
    return None
443
def ignore_diskcheck_sccs(node, name):
    # No-op counterpart of do_diskcheck_sccs(), installed when the
    # 'sccs' disk check is disabled.
    return None
446 447 diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match) 448 diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs) 449 diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs) 450 451 diskcheckers = [ 452 diskcheck_match, 453 diskcheck_rcs, 454 diskcheck_sccs, 455 ] 456
def set_diskcheck(list):
    """Enable or disable each registered disk checker according to
    whether its type name appears in list."""
    for checker in diskcheckers:
        checker.set(list)
460
def diskcheck_types():
    """Return the type names of all registered disk checkers.

    (List comprehension is equivalent to the original Python 2
    map()+lambda, which also returned a list.)
    """
    return [dc.type for dc in diskcheckers]
463 464 465
class EntryProxy(SCons.Util.Proxy):
    # Proxy wrapper around a Node.FS entry that implements the "special"
    # substitution attributes (e.g. ${TARGET.abspath}, ${SOURCE.posix})
    # via the dictSpecialAttrs table consulted in __getattr__().

    def __get_abspath(self):
        entry = self.get()
        return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
                                              entry.name + "_abspath")

    def __get_filebase(self):
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
                                              name + "_filebase")

    def __get_suffix(self):
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
                                              name + "_suffix")

    def __get_file(self):
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(name, name + "_file")

    def __get_base_path(self):
        """Return the file's directory and file name, with the
        suffix stripped."""
        entry = self.get()
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
                                              entry.name + "_base")

    def __get_posix_path(self):
        """Return the path with / as the path separator,
        regardless of platform."""
        if os.sep == '/':
            return self
        else:
            entry = self.get()
            r = string.replace(entry.get_path(), os.sep, '/')
            return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")

    def __get_windows_path(self):
        """Return the path with \ as the path separator,
        regardless of platform."""
        if os.sep == '\\':
            return self
        else:
            entry = self.get()
            r = string.replace(entry.get_path(), os.sep, '\\')
            return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")

    def __get_srcnode(self):
        return EntryProxy(self.get().srcnode())

    def __get_srcdir(self):
        """Returns the directory containing the source node linked to this
        node via VariantDir(), or the directory of this node if not linked."""
        return EntryProxy(self.get().srcnode().dir)

    def __get_rsrcnode(self):
        return EntryProxy(self.get().srcnode().rfile())

    def __get_rsrcdir(self):
        """Returns the directory containing the source node linked to this
        node via VariantDir(), or the directory of this node if not linked."""
        return EntryProxy(self.get().srcnode().rfile().dir)

    def __get_dir(self):
        return EntryProxy(self.get().dir)

    # Map of special attribute name -> accessor; "win32" is kept as a
    # backward-compatible alias for "windows".
    dictSpecialAttrs = { "base"     : __get_base_path,
                         "posix"    : __get_posix_path,
                         "windows"  : __get_windows_path,
                         "win32"    : __get_windows_path,
                         "srcpath"  : __get_srcnode,
                         "srcdir"   : __get_srcdir,
                         "dir"      : __get_dir,
                         "abspath"  : __get_abspath,
                         "filebase" : __get_filebase,
                         "suffix"   : __get_suffix,
                         "file"     : __get_file,
                         "rsrcpath" : __get_rsrcnode,
                         "rsrcdir"  : __get_rsrcdir,
                       }

    def __getattr__(self, name):
        # This is how we implement the "special" attributes
        # such as base, posix, srcdir, etc.
        try:
            attr_function = self.dictSpecialAttrs[name]
        except KeyError:
            try:
                attr = SCons.Util.Proxy.__getattr__(self, name)
            except AttributeError, e:
                # Raise our own AttributeError subclass with an
                # overridden __str__() method that identifies the
                # name of the entry that caused the exception.
                raise EntryProxyAttributeError(self, name)
            return attr
        else:
            return attr_function(self)
563
564 -class Base(SCons.Node.Node):
565 """A generic class for file system entries. This class is for 566 when we don't know yet whether the entry being looked up is a file 567 or a directory. Instances of this class can morph into either 568 Dir or File objects by a later, more precise lookup. 569 570 Note: this class does not define __cmp__ and __hash__ for 571 efficiency reasons. SCons does a lot of comparing of 572 Node.FS.{Base,Entry,File,Dir} objects, so those operations must be 573 as fast as possible, which means we want to use Python's built-in 574 object identity comparisons. 575 """ 576 577 memoizer_counters = [] 578
    def __init__(self, name, directory, fs):
        """Initialize a generic Node.FS.Base object.

        Call the superclass initialization, take care of setting up
        our relative and absolute paths, identify our parent
        directory, and indicate that this node should use
        signatures."""
        if __debug__: logInstanceCreation(self, 'Node.FS.Base')
        SCons.Node.Node.__init__(self)

        # Filenames and paths are probably reused and are intern'ed to
        # save some memory.
        self.name = SCons.Util.silent_intern(name)
        self.suffix = SCons.Util.silent_intern(SCons.Util.splitext(name)[1])
        self.fs = fs

        assert directory, "A directory must be provided"

        self.abspath = SCons.Util.silent_intern(directory.entry_abspath(name))
        self.labspath = SCons.Util.silent_intern(directory.entry_labspath(name))
        # A parent path of '.' means we're in the top-level directory,
        # so this entry's path is just its bare name.
        if directory.path == '.':
            self.path = SCons.Util.silent_intern(name)
        else:
            self.path = SCons.Util.silent_intern(directory.entry_path(name))
        if directory.tpath == '.':
            self.tpath = SCons.Util.silent_intern(name)
        else:
            self.tpath = SCons.Util.silent_intern(directory.entry_tpath(name))
        self.path_elements = directory.path_elements + [self]

        self.dir = directory
        self.cwd = None # will hold the SConscript directory for target nodes
        self.duplicate = directory.duplicate
612
613 - def str_for_display(self):
614 return '"' + self.__str__() + '"'
615
616 - def must_be_same(self, klass):
617 """ 618 This node, which already existed, is being looked up as the 619 specified klass. Raise an exception if it isn't. 620 """ 621 if isinstance(self, klass) or klass is Entry: 622 return 623 raise TypeError, "Tried to lookup %s '%s' as a %s." %\ 624 (self.__class__.__name__, self.path, klass.__name__)
625
    def get_dir(self):
        """Return this entry's parent directory Node."""
        return self.dir
628
    def get_suffix(self):
        """Return the file-name suffix (as computed by
        SCons.Util.splitext at initialization)."""
        return self.suffix
631
    def rfile(self):
        # Default: a Base node is its own repository file; subclasses
        # override to search repositories.
        return self
634
    def __str__(self):
        """A Node.FS.Base object's string representation is its path
        name."""
        global Save_Strings
        if Save_Strings:
            # SConscript reading is finished; safe to memoize.
            return self._save_str()
        return self._get_str()
642 643 memoizer_counters.append(SCons.Memoize.CountValue('_save_str')) 644
645 - def _save_str(self):
646 try: 647 return self._memo['_save_str'] 648 except KeyError: 649 pass 650 result = intern(self._get_str()) 651 self._memo['_save_str'] = result 652 return result
653
    def _get_str(self):
        """Compute (without memoizing) this node's string
        representation: its own path if it is duplicated, derived, or
        only present in the build directory; otherwise the source
        node's path."""
        global Save_Strings
        if self.duplicate or self.is_derived():
            return self.get_path()
        srcnode = self.srcnode()
        if srcnode.stat() is None and self.stat() is not None:
            result = self.get_path()
        else:
            result = srcnode.get_path()
        if not Save_Strings:
            # We're not at the point where we're saving the string
            # representations of FS Nodes (because we haven't finished
            # reading the SConscript files and need to have str() return
            # things relative to them).  That also means we can't yet
            # cache values returned (or not returned) by stat(), since
            # Python code in the SConscript files might still create
            # or otherwise affect the on-disk file.  So get rid of the
            # values that the underlying stat() method saved.
            try: del self._memo['stat']
            except KeyError: pass
            if self is not srcnode:
                try: del srcnode._memo['stat']
                except KeyError: pass
        return result
678 679 rstr = __str__ 680 681 memoizer_counters.append(SCons.Memoize.CountValue('stat')) 682
683 - def stat(self):
684 try: return self._memo['stat'] 685 except KeyError: pass 686 try: result = self.fs.stat(self.abspath) 687 except os.error: result = None 688 self._memo['stat'] = result 689 return result
690
    def exists(self):
        """An entry exists if its path can be stat'ed."""
        return self.stat() is not None
693
    def rexists(self):
        """Existence check that also considers the repository file."""
        return self.rfile().exists()
696
    def getmtime(self):
        # Modification time from stat(), or None if the node can't be
        # stat'ed.
        st = self.stat()
        if st: return st[stat.ST_MTIME]
        else: return None
701
    def getsize(self):
        # Size in bytes from stat(), or None if the node can't be
        # stat'ed.
        st = self.stat()
        if st: return st[stat.ST_SIZE]
        else: return None
706
    def isdir(self):
        # True only if the node stats successfully as a directory.
        st = self.stat()
        return st is not None and stat.S_ISDIR(st[stat.ST_MODE])
710
    def isfile(self):
        # True only if the node stats successfully as a regular file.
        st = self.stat()
        return st is not None and stat.S_ISREG(st[stat.ST_MODE])
714 715 if hasattr(os, 'symlink'): 720 else: 723
724 - def is_under(self, dir):
725 if self is dir: 726 return 1 727 else: 728 return self.dir.is_under(dir)
729
    def set_local(self):
        # Mark this node as one that must exist locally (not just in a
        # repository).
        self._local = 1
732
    def srcnode(self):
        """If this node is in a build path, return the node
        corresponding to its source file.  Otherwise, return
        ourself.
        """
        srcdir_list = self.dir.srcdir_list()
        if srcdir_list:
            # First linked source directory wins; the looked-up entry
            # must be (or morph into) the same node class as ourself.
            srcnode = srcdir_list[0].Entry(self.name)
            srcnode.must_be_same(self.__class__)
            return srcnode
        return self
744
745 - def get_path(self, dir=None):
746 """Return path relative to the current working directory of the 747 Node.FS.Base object that owns us.""" 748 if not dir: 749 dir = self.fs.getcwd() 750 if self == dir: 751 return '.' 752 path_elems = self.path_elements 753 try: i = path_elems.index(dir) 754 except ValueError: pass 755 else: path_elems = path_elems[i+1:] 756 path_elems = map(lambda n: n.name, path_elems) 757 return string.join(path_elems, os.sep)
758
    def set_src_builder(self, builder):
        """Set the source code builder for this node."""
        self.sbuilder = builder
        # Also make it the regular builder unless one is already set.
        if not self.has_builder():
            self.builder_set(builder)
764
    def src_builder(self):
        """Fetch the source code builder for this node.

        If there isn't one, we cache the source code builder specified
        for the directory (which in turn will cache the value from its
        parent directory, and so on up to the file system root).
        """
        try:
            scb = self.sbuilder
        except AttributeError:
            scb = self.dir.src_builder()
            self.sbuilder = scb
        return scb
778
    def get_abspath(self):
        """Get the absolute path of the file."""
        return self.abspath
782
    def for_signature(self):
        # Return just our name.  Even an absolute path would not work,
        # because that can change thanks to symlinks or remapped network
        # paths.
        return self.name
788
789 - def get_subst_proxy(self):
790 try: 791 return self._proxy 792 except AttributeError: 793 ret = EntryProxy(self) 794 self._proxy = ret 795 return ret
796
797 - def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
798 """ 799 800 Generates a target entry that corresponds to this entry (usually 801 a source file) with the specified prefix and suffix. 802 803 Note that this method can be overridden dynamically for generated 804 files that need different behavior. See Tool/swig.py for 805 an example. 806 """ 807 return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
808
    def _Rfindalldirs_key(self, pathlist):
        # Memoization key for Rfindalldirs(): the path list itself.
        return pathlist
811 812 memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key)) 813
    def Rfindalldirs(self, pathlist):
        """
        Return all of the directories for a given path list, including
        corresponding "backing" directories in any repositories.

        The Node lookups are relative to this Node (typically a
        directory), so memoizing result saves cycles from looking
        up the same path for each target in a given directory.
        """
        try:
            memo_dict = self._memo['Rfindalldirs']
        except KeyError:
            memo_dict = {}
            self._memo['Rfindalldirs'] = memo_dict
        else:
            try:
                return memo_dict[pathlist]
            except KeyError:
                pass

        create_dir_relative_to_self = self.Dir
        result = []
        for path in pathlist:
            if isinstance(path, SCons.Node.Node):
                # Already a Node: use it as-is.
                result.append(path)
            else:
                # A string: look it up relative to self and expand to
                # include repository backing directories.
                dir = create_dir_relative_to_self(path)
                result.extend(dir.get_all_rdirs())

        memo_dict[pathlist] = result

        return result
846
847 - def RDirs(self, pathlist):
848 """Search for a list of directories in the Repository list.""" 849 cwd = self.cwd or self.fs._cwd 850 return cwd.Rfindalldirs(pathlist)
851 852 memoizer_counters.append(SCons.Memoize.CountValue('rentry')) 853
    def rentry(self):
        """Return this node or, if it does not exist locally, the first
        matching entry found in a repository directory.  The result is
        memoized."""
        try:
            return self._memo['rentry']
        except KeyError:
            pass
        result = self
        if not self.exists():
            norm_name = _my_normcase(self.name)
            for dir in self.dir.get_all_rdirs():
                try:
                    node = dir.entries[norm_name]
                except KeyError:
                    # Not already a known entry; take it if it exists
                    # on disk in this repository directory.
                    if dir.entry_exists_on_disk(self.name):
                        result = dir.Entry(self.name)
                        break
                # NOTE(review): when the name IS found in dir.entries,
                # the found node is unused and the search continues --
                # confirm this is intentional.
        self._memo['rentry'] = result
        return result
871
    def _glob1(self, pattern, ondisk=True, source=False, strings=False):
        # Base entries have no children to glob; subclasses override.
        return []
874
875 -class Entry(Base):
876 """This is the class for generic Node.FS entries--that is, things 877 that could be a File or a Dir, but we're just not sure yet. 878 Consequently, the methods in this class really exist just to 879 transform their associated object into the right class when the 880 time comes, and then call the same-named method in the transformed 881 class.""" 882
883 - def diskcheck_match(self):
884 pass
885
886 - def disambiguate(self, must_exist=None):
887 """ 888 """ 889 if self.isdir(): 890 self.__class__ = Dir 891 self._morph() 892 elif self.isfile(): 893 self.__class__ = File 894 self._morph() 895 self.clear() 896 else: 897 # There was nothing on-disk at this location, so look in 898 # the src directory. 899 # 900 # We can't just use self.srcnode() straight away because 901 # that would create an actual Node for this file in the src 902 # directory, and there might not be one. Instead, use the 903 # dir_on_disk() method to see if there's something on-disk 904 # with that name, in which case we can go ahead and call 905 # self.srcnode() to create the right type of entry. 906 srcdir = self.dir.srcnode() 907 if srcdir != self.dir and \ 908 srcdir.entry_exists_on_disk(self.name) and \ 909 self.srcnode().isdir(): 910 self.__class__ = Dir 911 self._morph() 912 elif must_exist: 913 msg = "No such file or directory: '%s'" % self.abspath 914 raise SCons.Errors.UserError, msg 915 else: 916 self.__class__ = File 917 self._morph() 918 self.clear() 919 return self
920
921 - def rfile(self):
922 """We're a generic Entry, but the caller is actually looking for 923 a File at this point, so morph into one.""" 924 self.__class__ = File 925 self._morph() 926 self.clear() 927 return File.rfile(self)
928
929 - def scanner_key(self):
930 return self.get_suffix()
931
932 - def get_contents(self):
933 """Fetch the contents of the entry. Returns the exact binary 934 contents of the file.""" 935 try: 936 self = self.disambiguate(must_exist=1) 937 except SCons.Errors.UserError: 938 # There was nothing on disk with which to disambiguate 939 # this entry. Leave it as an Entry, but return a null 940 # string so calls to get_contents() in emitters and the 941 # like (e.g. in qt.py) don't have to disambiguate by hand 942 # or catch the exception. 943 return '' 944 else: 945 return self.get_contents()
946
947 - def get_text_contents(self):
948 """Fetch the decoded text contents of a Unicode encoded Entry. 949 950 Since this should return the text contents from the file 951 system, we check to see into what sort of subclass we should 952 morph this Entry.""" 953 try: 954 self = self.disambiguate(must_exist=1) 955 except SCons.Errors.UserError: 956 # There was nothing on disk with which to disambiguate 957 # this entry. Leave it as an Entry, but return a null 958 # string so calls to get_text_contents() in emitters and 959 # the like (e.g. in qt.py) don't have to disambiguate by 960 # hand or catch the exception. 961 return '' 962 else: 963 return self.get_text_contents()
964
965 - def must_be_same(self, klass):
966 """Called to make sure a Node is a Dir. Since we're an 967 Entry, we can morph into one.""" 968 if self.__class__ is not klass: 969 self.__class__ = klass 970 self._morph() 971 self.clear()
972 973 # The following methods can get called before the Taskmaster has 974 # had a chance to call disambiguate() directly to see if this Entry 975 # should really be a Dir or a File. We therefore use these to call 976 # disambiguate() transparently (from our caller's point of view). 977 # 978 # Right now, this minimal set of methods has been derived by just 979 # looking at some of the methods that will obviously be called early 980 # in any of the various Taskmasters' calling sequences, and then 981 # empirically figuring out which additional methods are necessary 982 # to make various tests pass. 983
def exists(self):
    """Return if the Entry exists.  Check the file system to see
    what we should turn into first.  Assume a file if there's no
    directory."""
    node = self.disambiguate()
    return node.exists()
989
def rel_path(self, other):
    """Return a path to `other` relative to this node.

    Disambiguates this Entry into a Dir or File first, then
    delegates to that class's rel_path() implementation.

    Raises an exception if disambiguation still leaves this node
    a plain Entry, since Entry itself has no path semantics.
    """
    d = self.disambiguate()
    if d.__class__ is Entry:
        # Raising a bare string ("raise 'msg'") was removed from
        # Python in 2.6; raise a real exception object instead.
        raise Exception("rel_path() could not disambiguate File/Dir")
    return d.rel_path(other)
995
def new_ninfo(self):
    """Delegate NodeInfo creation to the disambiguated Dir/File."""
    node = self.disambiguate()
    return node.new_ninfo()
998
def changed_since_last_build(self, target, prev_ni):
    """Delegate the change check to the disambiguated Dir/File."""
    node = self.disambiguate()
    return node.changed_since_last_build(target, prev_ni)
1001
def _glob1(self, pattern, ondisk=True, source=False, strings=False):
    """Delegate single-pattern globbing to the disambiguated node."""
    node = self.disambiguate()
    return node._glob1(pattern, ondisk, source, strings)
1004
def get_subst_proxy(self):
    """Delegate substitution-proxy creation to the disambiguated node."""
    node = self.disambiguate()
    return node.get_subst_proxy()
1007 1008 # This is for later so we can differentiate between Entry the class and Entry 1009 # the method of the FS class. 1010 _classEntry = Entry 1011 1012
1013 -class LocalFS:
1014 1015 if SCons.Memoize.use_memoizer: 1016 __metaclass__ = SCons.Memoize.Memoized_Metaclass 1017 1018 # This class implements an abstraction layer for operations involving 1019 # a local file system. Essentially, this wraps any function in 1020 # the os, os.path or shutil modules that we use to actually go do 1021 # anything with or to the local file system. 1022 # 1023 # Note that there's a very good chance we'll refactor this part of 1024 # the architecture in some way as we really implement the interface(s) 1025 # for remote file system Nodes. For example, the right architecture 1026 # might be to have this be a subclass instead of a base class. 1027 # Nevertheless, we're using this as a first step in that direction. 1028 # 1029 # We're not using chdir() yet because the calling subclass method 1030 # needs to use os.chdir() directly to avoid recursion. Will we 1031 # really need this one? 1032 #def chdir(self, path): 1033 # return os.chdir(path)
def chmod(self, path, mode):
    """Delegate to os.chmod()."""
    return os.chmod(path, mode)
def copy(self, src, dst):
    """Delegate to shutil.copy() (data and mode bits, not metadata)."""
    return shutil.copy(src, dst)
def copy2(self, src, dst):
    """Delegate to shutil.copy2() (also copies file metadata)."""
    return shutil.copy2(src, dst)
def exists(self, path):
    """Delegate to os.path.exists()."""
    return os.path.exists(path)
def getmtime(self, path):
    """Delegate to os.path.getmtime()."""
    return os.path.getmtime(path)
def getsize(self, path):
    """Delegate to os.path.getsize()."""
    return os.path.getsize(path)
def isdir(self, path):
    """Delegate to os.path.isdir()."""
    return os.path.isdir(path)
def isfile(self, path):
    """Delegate to os.path.isfile()."""
    return os.path.isfile(path)
def lstat(self, path):
    """Delegate to os.lstat() (does not follow symlinks)."""
    return os.lstat(path)
def listdir(self, path):
    """Delegate to os.listdir()."""
    return os.listdir(path)
def makedirs(self, path):
    """Delegate to os.makedirs() (creates intermediate dirs too)."""
    return os.makedirs(path)
def mkdir(self, path):
    """Delegate to os.mkdir() (single directory only)."""
    return os.mkdir(path)
def rename(self, old, new):
    """Delegate to os.rename()."""
    return os.rename(old, new)
def stat(self, path):
    """Delegate to os.stat() (follows symlinks)."""
    return os.stat(path)
def open(self, path):
    """Delegate to the open() builtin (default read mode)."""
    return open(path)
1070 1071 if hasattr(os, 'symlink'): 1074 else: 1077 1078 if hasattr(os, 'readlink'): 1081 else:
1084 1085 1086 #class RemoteFS: 1087 # # Skeleton for the obvious methods we might need from the 1088 # # abstraction layer for a remote filesystem. 1089 # def upload(self, local_src, remote_dst): 1090 # pass 1091 # def download(self, remote_src, local_dst): 1092 # pass 1093 1094
1095 -class FS(LocalFS):
1096 1097 memoizer_counters = [] 1098
def __init__(self, path = None):
    """Initialize the Node.FS subsystem.

    The supplied path is the top of the source tree, where we
    expect to find the top-level build file.  If no path is
    supplied, the current directory is the default.

    The path argument must be a valid absolute path.
    """
    if __debug__: logInstanceCreation(self, 'Node.FS')

    self._memo = {}

    # Maps (normalized) drive names to their RootDir nodes.
    self.Root = {}
    self.SConstruct_dir = None
    self.max_drift = default_max_drift

    self.Top = None
    if path is None:
        self.pathTop = os.getcwd()
    else:
        self.pathTop = path
    self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0])

    self.Top = self.Dir(self.pathTop)
    self.Top.path = '.'
    self.Top.tpath = '.'
    self._cwd = self.Top

    # Give the NodeInfo classes a reference back to this FS so
    # their str_to_node() methods can look nodes up.
    DirNodeInfo.fs = self
    FileNodeInfo.fs = self
1130
def set_SConstruct_dir(self, dir):
    """Record `dir` as the directory containing the SConstruct file."""
    self.SConstruct_dir = dir
1133
def get_max_drift(self):
    """Return the current max_drift setting."""
    return self.max_drift
1136
def set_max_drift(self, max_drift):
    """Set the max_drift value."""
    self.max_drift = max_drift
1139
def getcwd(self):
    """Return the current lookup working-directory node."""
    return self._cwd
1142
def chdir(self, dir, change_os_dir=0):
    """Change the current working directory for lookups.
    If change_os_dir is true, we will also change the "real" cwd
    to match.
    """
    curr=self._cwd
    try:
        if dir is not None:
            self._cwd = dir
            if change_os_dir:
                os.chdir(dir.abspath)
    except OSError:
        # os.chdir() failed; restore the previous lookup cwd
        # before propagating the error.
        self._cwd = curr
        raise
1157
def get_root(self, drive):
    """
    Returns the root directory for the specified drive, creating
    it if necessary.
    """
    drive = _my_normcase(drive)
    try:
        return self.Root[drive]
    except KeyError:
        root = RootDir(drive, self)
        self.Root[drive] = root
        # Alias the empty drive name and the default drive so both
        # map to the same RootDir node.
        if not drive:
            self.Root[self.defaultDrive] = root
        elif drive == self.defaultDrive:
            self.Root[''] = root
        return root
1174
def _lookup(self, p, directory, fsclass, create=1):
    """
    The generic entry point for Node lookup with user-supplied data.

    This translates arbitrary input into a canonical Node.FS object
    of the specified fsclass.  The general approach for strings is
    to turn it into a fully normalized absolute path and then call
    the root directory's lookup_abs() method for the heavy lifting.

    If the path name begins with '#', it is unconditionally
    interpreted relative to the top-level directory of this FS.  '#'
    is treated as a synonym for the top-level SConstruct directory,
    much like '~' is treated as a synonym for the user's home
    directory in a UNIX shell.  So both '#foo' and '#/foo' refer
    to the 'foo' subdirectory underneath the top-level SConstruct
    directory.

    If the path name is relative, then the path is looked up relative
    to the specified directory, or the current directory (self._cwd,
    typically the SConscript directory) if the specified directory
    is None.
    """
    if isinstance(p, Base):
        # It's already a Node.FS object.  Make sure it's the right
        # class and return.
        p.must_be_same(fsclass)
        return p
    # str(p) in case it's something like a proxy object
    p = str(p)

    initial_hash = (p[0:1] == '#')
    if initial_hash:
        # There was an initial '#', so we strip it and override
        # whatever directory they may have specified with the
        # top-level SConstruct directory.
        p = p[1:]
        directory = self.Top

    if directory and not isinstance(directory, Dir):
        directory = self.Dir(directory)

    if do_splitdrive:
        drive, p = os.path.splitdrive(p)
    else:
        drive = ''
    if drive and not p:
        # This causes a naked drive letter to be treated as a synonym
        # for the root directory on that drive.
        p = os.sep
    absolute = os.path.isabs(p)

    needs_normpath = needs_normpath_check.match(p)

    if initial_hash or not absolute:
        # This is a relative lookup, either to the top-level
        # SConstruct directory (because of the initial '#') or to
        # the current directory (the path name is not absolute).
        # Add the string to the appropriate directory lookup path,
        # after which the whole thing gets normalized.
        if not directory:
            directory = self._cwd
        if p:
            p = directory.labspath + '/' + p
        else:
            p = directory.labspath

    if needs_normpath:
        p = os.path.normpath(p)

    if drive or absolute:
        root = self.get_root(drive)
    else:
        if not directory:
            directory = self._cwd
        root = directory.root

    if os.sep != '/':
        # Internal lookup paths always use '/' separators.
        p = string.replace(p, os.sep, '/')
    return root._lookup_abs(p, fsclass, create)
1254
def Entry(self, name, directory = None, create = 1):
    """Look up or create a generic Entry node with the specified name.
    If the name is a relative path (begins with ./, ../, or a file
    name), then it is looked up relative to the supplied directory
    node, or to the top level directory of the FS (supplied at
    construction time) if no directory is supplied.
    """
    # 'Entry' here is the module-level class, not this method.
    return self._lookup(name, directory, Entry, create)
1263
def File(self, name, directory = None, create = 1):
    """Look up or create a File node with the specified name.  If
    the name is a relative path (begins with ./, ../, or a file name),
    then it is looked up relative to the supplied directory node,
    or to the top level directory of the FS (supplied at construction
    time) if no directory is supplied.

    This method will raise TypeError if a directory is found at the
    specified path.
    """
    # 'File' here is the module-level class, not this method.
    return self._lookup(name, directory, File, create)
1275
def Dir(self, name, directory = None, create = True):
    """Look up or create a Dir node with the specified name.  If
    the name is a relative path (begins with ./, ../, or a file name),
    then it is looked up relative to the supplied directory node,
    or to the top level directory of the FS (supplied at construction
    time) if no directory is supplied.

    This method will raise TypeError if a normal file is found at the
    specified path.
    """
    # 'Dir' here is the module-level class, not this method.
    return self._lookup(name, directory, Dir, create)
1287
def VariantDir(self, variant_dir, src_dir, duplicate=1):
    """Link the supplied variant directory to the source directory
    for purposes of building files.

    Raises SCons.Errors.UserError if the source directory is under
    the variant directory, or if the variant directory is already
    linked to a different source directory."""

    if not isinstance(src_dir, SCons.Node.Node):
        src_dir = self.Dir(src_dir)
    if not isinstance(variant_dir, SCons.Node.Node):
        variant_dir = self.Dir(variant_dir)
    if src_dir.is_under(variant_dir):
        raise SCons.Errors.UserError, "Source directory cannot be under variant directory."
    if variant_dir.srcdir:
        if variant_dir.srcdir == src_dir:
            return # We already did this.
        raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)
    variant_dir.link(src_dir, duplicate)
1303
def Repository(self, *dirs):
    """Specify Repository directories to search."""
    for d in dirs:
        # Convert strings (or anything else) into Dir nodes.
        if not isinstance(d, SCons.Node.Node):
            d = self.Dir(d)
        self.Top.addRepository(d)
1310
def variant_dir_target_climb(self, orig, dir, tail):
    """Create targets in corresponding variant directories

    Climb the directory tree, and look up path names
    relative to any linked variant directories we find.

    Even though this loops and walks up the tree, we don't memoize
    the return value because this is really only used to process
    the command-line targets.
    """
    targets = []
    message = None
    fmt = "building associated VariantDir targets: %s"
    start_dir = dir
    while dir:
        for bd in dir.variant_dirs:
            if start_dir.is_under(bd):
                # If already in the build-dir location, don't reflect
                return [orig], fmt % str(orig)
            # os.path.join(*args) replaces the deprecated
            # apply(os.path.join, args); identical behavior.
            p = os.path.join(*([bd.path] + tail))
            targets.append(self.Entry(p))
        tail = [dir.name] + tail
        dir = dir.up()
    if targets:
        message = fmt % string.join(map(str, targets))
    return targets, message
1337
def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
    """
    Glob the pathname pattern relative to `cwd` (default: the
    current lookup directory).

    This is mainly a shim layer that delegates to Dir.glob().
    """
    if cwd is None:
        cwd = self.getcwd()
    return cwd.glob(pathname, ondisk, source, strings)
1347
class DirNodeInfo(SCons.Node.NodeInfoBase):
    """NodeInfo subclass for directory nodes."""
    # This should get reset by the FS initialization.
    current_version_id = 1

    # Reference back to the FS object; assigned in FS.__init__().
    fs = None

    def str_to_node(self, s):
        """Turn a stored path string back into a Node, looking it up
        relative to the top-level directory of self.fs."""
        top = self.fs.Top
        root = top.root
        if do_splitdrive:
            drive, s = os.path.splitdrive(s)
            if drive:
                root = self.fs.get_root(drive)
        if not os.path.isabs(s):
            s = top.labspath + '/' + s
        return root._lookup_abs(s, Entry)
1364
class DirBuildInfo(SCons.Node.BuildInfoBase):
    """BuildInfo subclass for directory nodes."""
    current_version_id = 1
# Matches any of the glob wildcard characters: *, ? or [.
glob_magic_check = re.compile('[*?[]')

def has_glob_magic(s):
    """Return True if `s` contains glob wildcard characters."""
    match = glob_magic_check.search(s)
    return match is not None
1372
1373 -class Dir(Base):
1374 """A class for directories in a file system. 1375 """ 1376 1377 memoizer_counters = [] 1378 1379 NodeInfo = DirNodeInfo 1380 BuildInfo = DirBuildInfo 1381
def __init__(self, name, directory, fs):
    """Construct a Dir node and immediately morph it into a
    fully-initialized directory object (see _morph())."""
    if __debug__: logInstanceCreation(self, 'Node.FS.Dir')
    Base.__init__(self, name, directory, fs)
    self._morph()
1386
def _morph(self):
    """Turn a file system Node (either a freshly initialized directory
    object or a separate Entry object) into a proper directory object.

    Set up this directory's entries and hook it into the file
    system tree.  Specify that directories (this Node) don't use
    signatures for calculating whether they're current.
    """

    self.repositories = []
    self.srcdir = None

    # '.' and '..' are always present, pointing at self and parent.
    self.entries = {}
    self.entries['.'] = self
    self.entries['..'] = self.dir
    self.cwd = self
    self.searched = 0
    self._sconsign = None
    self.variant_dirs = []
    self.root = self.dir.root

    # Don't just reset the executor, replace its action list,
    # because it might have some pre-or post-actions that need to
    # be preserved.
    self.builder = get_MkdirBuilder()
    self.get_executor().set_action_list(self.builder.action)
1413
def diskcheck_match(self):
    """Complain if a regular file exists on disk where this
    directory node is expected."""
    # The bare name refers to the module-level diskcheck_match()
    # function, not this method.
    diskcheck_match(self, self.isfile,
                    "File %s found where directory expected.")
1417
def __clearRepositoryCache(self, duplicate=None):
    """Called when we change the repository(ies) for a directory.
    This clears any cached information that is invalidated by changing
    the repository."""

    for node in self.entries.values():
        if node != self.dir:
            if node != self and isinstance(node, Dir):
                # Recurse into subdirectories.
                node.__clearRepositoryCache(duplicate)
            else:
                node.clear()
                try:
                    del node._srcreps
                except AttributeError:
                    pass
                if duplicate is not None:
                    node.duplicate=duplicate
1435
def __resetDuplicate(self, node):
    """Reset `node`'s duplicate flag from its parent directory."""
    if node != self:
        node.duplicate = node.get_dir().duplicate
1439
def Entry(self, name):
    """
    Looks up or creates an entry node named 'name' relative to
    this directory.  Delegates to self.fs.Entry() with this
    directory as the lookup base.
    """
    return self.fs.Entry(name, self)
1446
def Dir(self, name, create=True):
    """
    Looks up or creates a directory node named 'name' relative to
    this directory.  Delegates to self.fs.Dir() with this
    directory as the lookup base.
    """
    return self.fs.Dir(name, self, create)
1453
def File(self, name):
    """
    Looks up or creates a file node named 'name' relative to
    this directory.  Delegates to self.fs.File() with this
    directory as the lookup base.
    """
    return self.fs.File(name, self)
1460
def _lookup_rel(self, name, klass, create=1):
    """
    Looks up a *normalized* relative path name, relative to this
    directory.

    This method is intended for use by internal lookups with
    already-normalized path data.  For general-purpose lookups,
    use the Entry(), Dir() and File() methods above.

    This method does *no* input checking and will die or give
    incorrect results if it's passed a non-normalized path name (e.g.,
    a path containing '..'), an absolute path name, a top-relative
    ('#foo') path name, or any kind of object.
    """
    # Build the lookup-absolute path, then resolve from our root.
    name = self.entry_labspath(name)
    return self.root._lookup_abs(name, klass, create)
1477 1485
def getRepositories(self):
    """Returns a list of repositories for this directory.
    """
    # A non-duplicating variant dir also searches its source
    # directory's repository chain.
    if self.srcdir and not self.duplicate:
        return self.srcdir.get_all_rdirs() + self.repositories
    return self.repositories
1492 1493 memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs')) 1494
def get_all_rdirs(self):
    """Return this directory and its corresponding directories in
    all repositories, walking up through every ancestor (memoized)."""
    try:
        return list(self._memo['get_all_rdirs'])
    except KeyError:
        pass

    result = [self]
    fname = '.'
    dir = self
    while dir:
        for rep in dir.getRepositories():
            result.append(rep.Dir(fname))
        if fname == '.':
            fname = dir.name
        else:
            fname = dir.name + os.sep + fname
        dir = dir.up()

    # Memoize a copy so callers can't mutate the cached list.
    self._memo['get_all_rdirs'] = list(result)

    return result
1516
def addRepository(self, dir):
    """Add `dir` as a repository for this directory and invalidate
    any cached repository information."""
    if dir != self and not dir in self.repositories:
        self.repositories.append(dir)
        dir.tpath = '.'
        self.__clearRepositoryCache()
1522
def up(self):
    """Return this directory's parent directory (its '..' entry)."""
    return self.entries['..']
1525
def _rel_path_key(self, other):
    """Memoization key for rel_path(): the string form of `other`."""
    return str(other)
1528 1529 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key)) 1530
def rel_path(self, other):
    """Return a path to "other" relative to this directory.
    """

    # This complicated and expensive method, which constructs relative
    # paths between arbitrary Node.FS objects, is no longer used
    # by SCons itself.  It was introduced to store dependency paths
    # in .sconsign files relative to the target, but that ended up
    # being significantly inefficient.
    #
    # We're continuing to support the method because some SConstruct
    # files out there started using it when it was available, and
    # we're all about backwards compatibility..

    try:
        memo_dict = self._memo['rel_path']
    except KeyError:
        memo_dict = {}
        self._memo['rel_path'] = memo_dict
    else:
        try:
            return memo_dict[other]
        except KeyError:
            pass

    if self is other:
        result = '.'

    elif not other in self.path_elements:
        # 'other' is not underneath us; build the path through
        # its parent directory recursively.
        try:
            other_dir = other.get_dir()
        except AttributeError:
            result = str(other)
        else:
            if other_dir is None:
                result = other.name
            else:
                dir_rel_path = self.rel_path(other_dir)
                if dir_rel_path == '.':
                    result = other.name
                else:
                    result = dir_rel_path + os.sep + other.name
    else:
        # 'other' is one of our ancestors: climb with '..' entries.
        i = self.path_elements.index(other) + 1

        path_elems = ['..'] * (len(self.path_elements) - i) \
                     + map(lambda n: n.name, other.path_elements[i:])

        result = string.join(path_elems, os.sep)

    memo_dict[other] = result

    return result
1584
def get_env_scanner(self, env, kw={}):
    """Return the scanner for this directory's entries.
    `env` and `kw` are accepted for interface compatibility but
    not used here."""
    import SCons.Defaults
    return SCons.Defaults.DirEntryScanner
1588
def get_target_scanner(self):
    """Return the scanner used when this directory is a target."""
    import SCons.Defaults
    return SCons.Defaults.DirEntryScanner
1592
def get_found_includes(self, env, scanner, path):
    """Return this directory's implicit dependencies.

    We don't bother caching the results because the scan typically
    shouldn't be requested more than once (as opposed to scanning
    .h file contents, which can be requested as many times as the
    files is #included by other files).
    """
    if not scanner:
        return []
    # Clear cached info for this Dir.  If we already visited this
    # directory on our walk down the tree (because we didn't know at
    # that point it was being used as the source for another Node)
    # then we may have calculated build signature before realizing
    # we had to scan the disk.  Now that we have to, though, we need
    # to invalidate the old calculated signature so that any node
    # dependent on our directory structure gets one that includes
    # info about everything on disk.
    self.clear()
    return scanner(self, env, path)
1613 1614 # 1615 # Taskmaster interface subsystem 1616 # 1617
def prepare(self):
    """Do nothing: directories need no preparation before building."""
    pass
1620
def build(self, **kw):
    """A null "builder" for directories.

    Only delegate to the real Node build machinery if some builder
    other than the default MkdirBuilder has been attached.
    """
    global MkdirBuilder
    if self.builder is not MkdirBuilder:
        # SCons.Node.Node.build(self, **kw) replaces the deprecated
        # apply(SCons.Node.Node.build, [self,], kw); same behavior.
        SCons.Node.Node.build(self, **kw)
1626 1627 # 1628 # 1629 # 1630
def _create(self):
    """Create this directory, silently and without worrying about
    whether the builder is the default or not."""
    # Collect the chain of nonexistent ancestors, deepest first
    # after the reverse below.
    listDirs = []
    parent = self
    while parent:
        if parent.exists():
            break
        listDirs.append(parent)
        p = parent.up()
        if p is None:
            # Don't use while: - else: for this condition because
            # if so, then parent is None and has no .path attribute.
            raise SCons.Errors.StopError, parent.path
        parent = p
    listDirs.reverse()
    for dirnode in listDirs:
        try:
            # Don't call dirnode.build(), call the base Node method
            # directly because we definitely *must* create this
            # directory.  The dirnode.build() method will suppress
            # the build if it's the default builder.
            SCons.Node.Node.build(dirnode)
            dirnode.get_executor().nullify()
            # The build() action may or may not have actually
            # created the directory, depending on whether the -n
            # option was used or not.  Delete the _exists and
            # _rexists attributes so they can be reevaluated.
            dirnode.clear()
        except OSError:
            pass
1662
1664 global MkdirBuilder 1665 return self.builder is not MkdirBuilder and self.has_builder()
1666
def alter_targets(self):
    """Return any corresponding targets in a variant directory.
    """
    return self.fs.variant_dir_target_climb(self, self, [])
1671
def scanner_key(self):
    """A directory does not get scanned."""
    return None
1675
def get_text_contents(self):
    """We already emit things in text, so just return the binary
    version."""
    return self.get_contents()
1680
def get_contents(self):
    """Return content signatures and names of all our children
    separated by new-lines.  Ensure that the nodes are sorted."""
    contents = []
    name_cmp = lambda a, b: cmp(a.name, b.name)
    # Sort a copy so the cached children list is not reordered.
    sorted_children = self.children()[:]
    sorted_children.sort(name_cmp)
    for node in sorted_children:
        contents.append('%s %s\n' % (node.get_csig(), node.name))
    return string.join(contents, '')
1691
def get_csig(self):
    """Compute the content signature for Directory nodes.  In
    general, this is not needed and the content signature is not
    stored in the DirNodeInfo.  However, if get_contents on a Dir
    node is called which has a child directory, the child
    directory should return the hash of its contents."""
    contents = self.get_contents()
    return SCons.Util.MD5signature(contents)
1700
def do_duplicate(self, src):
    """Duplicating into a directory node is a no-op."""
    pass
1703 1704 changed_since_last_build = SCons.Node.Node.state_has_changed 1705
def is_up_to_date(self):
    """If any child is not up-to-date, then this directory isn't,
    either."""
    if self.builder is not MkdirBuilder and not self.exists():
        return 0
    up_to_date = SCons.Node.up_to_date
    for kid in self.children():
        # Any child whose state is past 'up_to_date' invalidates us.
        if kid.get_state() > up_to_date:
            return 0
    return 1
1716
def rdir(self):
    """Return this directory, or its first existing counterpart
    found in a repository if this one does not exist."""
    if not self.exists():
        norm_name = _my_normcase(self.name)
        for dir in self.dir.get_all_rdirs():
            try: node = dir.entries[norm_name]
            except KeyError: node = dir.dir_on_disk(self.name)
            if node and node.exists() and \
                (isinstance(dir, Dir) or isinstance(dir, Entry)):
                    return node
    return self
1727
def sconsign(self):
    """Return the .sconsign file info for this directory,
    creating it first if necessary."""
    if not self._sconsign:
        # Imported lazily, only when first needed.
        import SCons.SConsign
        self._sconsign = SCons.SConsign.ForDirectory(self)
    return self._sconsign
1735
def srcnode(self):
    """Dir has a special need for srcnode()...if we
    have a srcdir attribute set, then that *is* our srcnode."""
    if self.srcdir:
        return self.srcdir
    return Base.srcnode(self)
1742
def get_timestamp(self):
    """Return the latest timestamp from among our children.
    Returns 0 if there are no children."""
    stamp = 0
    for kid in self.children():
        if kid.get_timestamp() > stamp:
            stamp = kid.get_timestamp()
    return stamp
1750
def entry_abspath(self, name):
    """Return this directory's absolute path joined with `name`."""
    return self.abspath + os.sep + name
1753
def entry_labspath(self, name):
    """Return this directory's lookup path ('/'-separated) joined
    with `name`."""
    return self.labspath + '/' + name
1756
def entry_path(self, name):
    """Return this directory's path joined with `name`."""
    return self.path + os.sep + name
1759
def entry_tpath(self, name):
    """Return this directory's tpath joined with `name`."""
    return self.tpath + os.sep + name
1762
def entry_exists_on_disk(self, name):
    """Return whether an entry called `name` exists on disk in this
    directory.

    The directory listing is read once and cached on the node as
    self.on_disk_entries (with names case-normalized).  On Windows
    we additionally fall back to os.path.exists() to catch 8.3
    short names that os.listdir() does not report, caching that
    result as well.
    """
    try:
        d = self.on_disk_entries
    except AttributeError:
        # First call: build and cache the normalized-name map.
        d = {}
        try:
            entries = os.listdir(self.abspath)
        except OSError:
            pass
        else:
            for entry in map(_my_normcase, entries):
                d[entry] = True
        self.on_disk_entries = d
    if sys.platform == 'win32':
        name = _my_normcase(name)
        result = d.get(name)
        if result is None:
            # Belt-and-suspenders for Windows:  check directly for
            # 8.3 file names that don't show up in os.listdir().
            result = os.path.exists(self.abspath + os.sep + name)
            d[name] = result
        return result
    else:
        # 'in' replaces the deprecated dict.has_key(); same result.
        return _my_normcase(name) in d
1787 1788 memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list')) 1789
def srcdir_list(self):
    """Return the source directories (from VariantDir linkages)
    corresponding to this directory, walking up through every
    ancestor (memoized)."""
    try:
        return self._memo['srcdir_list']
    except KeyError:
        pass

    result = []

    dirname = '.'
    dir = self
    while dir:
        if dir.srcdir:
            result.append(dir.srcdir.Dir(dirname))
        dirname = dir.name + os.sep + dirname
        dir = dir.up()

    self._memo['srcdir_list'] = result

    return result
1809
def srcdir_duplicate(self, name):
    """Look for `name` in our source directories; if found, return
    its node, duplicating it into this directory first when
    duplication is enabled.  Returns None if not found."""
    for dir in self.srcdir_list():
        if self.is_under(dir):
            # We shouldn't source from something in the build path;
            # variant_dir is probably under src_dir, in which case
            # we are reflecting.
            break
        if dir.entry_exists_on_disk(name):
            srcnode = dir.Entry(name).disambiguate()
            if self.duplicate:
                node = self.Entry(name).disambiguate()
                node.do_duplicate(srcnode)
                return node
            else:
                return srcnode
    return None
1826
def _srcdir_find_file_key(self, filename):
    """Memoization key for srcdir_find_file(): the filename itself."""
    return filename
1829 1830 memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key)) 1831
def srcdir_find_file(self, filename):
    """Find `filename` in this directory's repository tree or in
    the repository trees of its source directories (memoized).

    Returns a (node, directory) tuple, or (None, None) if the file
    cannot be found.
    """
    try:
        memo_dict = self._memo['srcdir_find_file']
    except KeyError:
        memo_dict = {}
        self._memo['srcdir_find_file'] = memo_dict
    else:
        try:
            return memo_dict[filename]
        except KeyError:
            pass

    def func(node):
        # Accept only File/Entry nodes that are derived or exist.
        if (isinstance(node, File) or isinstance(node, Entry)) and \
           (node.is_derived() or node.exists()):
            return node
        return None

    norm_name = _my_normcase(filename)

    for rdir in self.get_all_rdirs():
        try: node = rdir.entries[norm_name]
        except KeyError: node = rdir.file_on_disk(filename)
        else: node = func(node)
        if node:
            result = (node, self)
            memo_dict[filename] = result
            return result

    for srcdir in self.srcdir_list():
        for rdir in srcdir.get_all_rdirs():
            try: node = rdir.entries[norm_name]
            except KeyError: node = rdir.file_on_disk(filename)
            else: node = func(node)
            if node:
                result = (File(filename, self, self.fs), srcdir)
                memo_dict[filename] = result
                return result

    result = (None, None)
    memo_dict[filename] = result
    return result
1874
def dir_on_disk(self, name):
    """Return a node for `name` if a directory by that name exists
    on disk here (or via srcdir duplication); otherwise None."""
    if self.entry_exists_on_disk(name):
        try: return self.Dir(name)
        except TypeError: pass
    node = self.srcdir_duplicate(name)
    if isinstance(node, File):
        return None
    return node
1883
def file_on_disk(self, name):
    """Return a node for `name` if a file by that name exists on
    disk here (or in RCS/SCCS, or via srcdir duplication);
    otherwise None."""
    if self.entry_exists_on_disk(name) or \
       diskcheck_rcs(self, name) or \
       diskcheck_sccs(self, name):
        try: return self.File(name)
        except TypeError: pass
    node = self.srcdir_duplicate(name)
    if isinstance(node, Dir):
        return None
    return node
1894
def walk(self, func, arg):
    """
    Walk this directory tree by calling the specified function
    for each directory in the tree.

    This behaves like the os.path.walk() function, but for in-memory
    Node.FS.Dir objects.  The function takes the same arguments as
    the functions passed to os.path.walk():

            func(arg, dirname, fnames)

    Except that "dirname" will actually be the directory *Node*,
    not the string.  The '.' and '..' entries are excluded from
    fnames.  The fnames list may be modified in-place to filter the
    subdirectories visited or otherwise impose a specific order.
    The "arg" argument is always passed to func() and may be used
    in any way (or ignored, passing None is common).
    """
    entries = self.entries
    names = entries.keys()
    names.remove('.')
    names.remove('..')
    func(arg, self, names)
    # Recurse into whichever of the (possibly filtered) names are
    # subdirectory nodes.
    select_dirs = lambda n, e=entries: isinstance(e[n], Dir)
    for dirname in filter(select_dirs, names):
        entries[dirname].walk(func, arg)
1921
def glob(self, pathname, ondisk=True, source=False, strings=False):
    """
    Returns a list of Nodes (or strings) matching a specified
    pathname pattern.

    Pathname patterns follow UNIX shell semantics:  * matches
    any-length strings of any characters, ? matches any character,
    and [] can enclose lists or ranges of characters.  Matches do
    not span directory separators.

    The matches take into account Repositories, returning local
    Nodes if a corresponding entry exists in a Repository (either
    an in-memory Node or something on disk).

    By default, the glob() function matches entries that exist
    on-disk, in addition to in-memory Nodes.  Setting the "ondisk"
    argument to False (or some other non-true value) causes the glob()
    function to only match in-memory Nodes.  The default behavior is
    to return both the on-disk and in-memory Nodes.

    The "source" argument, when true, specifies that corresponding
    source Nodes must be returned if you're globbing in a build
    directory (initialized with VariantDir()).  The default behavior
    is to return Nodes local to the VariantDir().

    The "strings" argument, when true, returns the matches as strings,
    not Nodes.  The strings are path names relative to this directory.

    The underlying algorithm is adapted from the glob.glob() function
    in the Python library (but heavily modified), and uses fnmatch()
    under the covers.
    """
    dirname, basename = os.path.split(pathname)
    if not dirname:
        result = self._glob1(basename, ondisk, source, strings)
        result.sort(lambda a, b: cmp(str(a), str(b)))
        return result
    # Glob the directory part first, then the basename within each
    # matched directory.
    if has_glob_magic(dirname):
        list = self.glob(dirname, ondisk, source, strings=False)
    else:
        list = [self.Dir(dirname, create=True)]
    result = []
    for dir in list:
        r = dir._glob1(basename, ondisk, source, strings)
        if strings:
            r = map(lambda x, d=str(dir): os.path.join(d, x), r)
        result.extend(r)
    result.sort(lambda a, b: cmp(str(a), str(b)))
    return result
1971
def _glob1(self, pattern, ondisk=True, source=False, strings=False):
    """
    Globs for and returns a list of entry names matching a single
    pattern in this directory.

    This searches any repositories and source directories for
    corresponding entries and returns a Node (or string) relative
    to the current directory if an entry is found anywhere.

    TODO: handle pattern with no wildcard
    """
    search_dir_list = self.get_all_rdirs()
    for srcdir in self.srcdir_list():
        search_dir_list.extend(srcdir.get_all_rdirs())

    selfEntry = self.Entry
    names = []
    for dir in search_dir_list:
        # We use the .name attribute from the Node because the keys of
        # the dir.entries dictionary are normalized (that is, all upper
        # case) on case-insensitive systems like Windows.
        #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ]
        entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys())
        node_names = map(lambda n, e=dir.entries: e[n].name, entry_names)
        names.extend(node_names)
        if not strings:
            # Make sure the working directory (self) actually has
            # entries for all Nodes in repositories or variant dirs.
            for name in node_names: selfEntry(name)
        if ondisk:
            try:
                disk_names = os.listdir(dir.abspath)
            except os.error:
                continue
            names.extend(disk_names)
            if not strings:
                # We're going to return corresponding Nodes in
                # the local directory, so we need to make sure
                # those Nodes exist.  We only want to create
                # Nodes for the entries that will match the
                # specified pattern, though, which means we
                # need to filter the list here, even though
                # the overall list will also be filtered later,
                # after we exit this loop.
                if pattern[0] != '.':
                    #disk_names = [ d for d in disk_names if d[0] != '.' ]
                    disk_names = filter(lambda x: x[0] != '.', disk_names)
                disk_names = fnmatch.filter(disk_names, pattern)
                dirEntry = dir.Entry
                for name in disk_names:
                    # Add './' before disk filename so that '#' at
                    # beginning of filename isn't interpreted.
                    name = './' + name
                    node = dirEntry(name).disambiguate()
                    n = selfEntry(name)
                    if n.__class__ != node.__class__:
                        n.__class__ = node.__class__
                        n._morph()

    names = set(names)
    if pattern[0] != '.':
        #names = [ n for n in names if n[0] != '.' ]
        names = filter(lambda x: x[0] != '.', names)
    names = fnmatch.filter(names, pattern)

    if strings:
        return names

    #return [ self.entries[_my_normcase(n)] for n in names ]
    return map(lambda n, e=self.entries: e[_my_normcase(n)], names)
2042
class RootDir(Dir):
    """A class for the root directory of a file system.

    This is the same as a Dir class, except that the path separator
    ('/' or '\\') is actually part of the name, so we don't need to
    add a separator when creating the path names of entries within
    this directory.
    """
    def __init__(self, name, fs):
        if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
        # We're going to be our own parent directory (".." entry and .dir
        # attribute) so we have to set up some values so Base.__init__()
        # won't gag when it calls some of our methods.
        self.abspath = ''
        self.labspath = ''
        self.path = ''
        self.tpath = ''
        self.path_elements = []
        self.duplicate = 0
        self.root = self
        Base.__init__(self, name, self, fs)

        # Now set our paths to what we really want them to be: the
        # initial drive letter (the name) plus the directory separator,
        # except for the "lookup abspath," which does not have the
        # drive letter.
        self.abspath = name + os.sep
        self.labspath = ''
        self.path = name + os.sep
        self.tpath = name + os.sep
        self._morph()

        # Maps normalized absolute path strings to their one true
        # Node, for _lookup_abs() below.
        self._lookupDict = {}

        # The // and os.sep + os.sep entries are necessary because
        # os.path.normpath() seems to preserve double slashes at the
        # beginning of a path (presumably for UNC path names), but
        # collapses triple slashes to a single slash.
        self._lookupDict[''] = self
        self._lookupDict['/'] = self
        self._lookupDict['//'] = self
        self._lookupDict[os.sep] = self
        self._lookupDict[os.sep + os.sep] = self

    def must_be_same(self, klass):
        # A root directory is always an acceptable Dir, so looking it
        # up as a plain Dir never complains.
        if klass is Dir:
            return
        Base.must_be_same(self, klass)

    def _lookup_abs(self, p, klass, create=1):
        """
        Fast (?) lookup of a *normalized* absolute path.

        This method is intended for use by internal lookups with
        already-normalized path data.  For general-purpose lookups,
        use the FS.Entry(), FS.Dir() or FS.File() methods.

        The caller is responsible for making sure we're passed a
        normalized absolute path; we merely let Python's dictionary look
        up and return the One True Node.FS object for the path.

        If a Node for the specified "p" doesn't already exist, and
        "create" is specified, the Node may be created after recursive
        invocation to find or create the parent directory or directories.
        """
        k = _my_normcase(p)
        try:
            result = self._lookupDict[k]
        except KeyError:
            if not create:
                msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self))
                raise SCons.Errors.UserError, msg
            # There is no Node for this path name, and we're allowed
            # to create it.
            dir_name, file_name = os.path.split(p)
            dir_node = self._lookup_abs(dir_name, Dir)
            result = klass(file_name, dir_node, self.fs)

            # Double-check on disk (as configured) that the Node we
            # created matches whatever is out there in the real world.
            result.diskcheck_match()

            self._lookupDict[k] = result
            dir_node.entries[_my_normcase(file_name)] = result
            dir_node.implicit = None
        else:
            # There is already a Node for this path name.  Allow it to
            # complain if we were looking for an inappropriate type.
            result.must_be_same(klass)
        return result

    def __str__(self):
        return self.abspath

    def entry_abspath(self, name):
        # The separator is already part of our name; just concatenate.
        return self.abspath + name

    def entry_labspath(self, name):
        return '/' + name

    def entry_path(self, name):
        return self.path + name

    def entry_tpath(self, name):
        return self.tpath + name

    def is_under(self, dir):
        # The root is only "under" itself.
        if self is dir:
            return 1
        else:
            return 0

    def up(self):
        # A root directory has no parent.
        return None

    def get_dir(self):
        return None

    def src_builder(self):
        return _null
2163
class FileNodeInfo(SCons.Node.NodeInfoBase):
    """Node ("ninfo") state recorded for a File: its content signature,
    timestamp and size, as stored in a .sconsign file."""

    current_version_id = 1

    # The fields pickled to (and unpickled from) a .sconsign file.
    field_list = ['csig', 'timestamp', 'size']

    # This should get reset by the FS initialization.
    fs = None

    def str_to_node(self, s):
        # Convert a path string (as stored in a .sconsign file) back
        # into an Entry Node: relative paths are interpreted relative
        # to the top-level SConstruct directory; on drive-letter
        # systems the drive selects the root to look up under.
        top = self.fs.Top
        root = top.root
        if do_splitdrive:
            drive, s = os.path.splitdrive(s)
            if drive:
                root = self.fs.get_root(drive)
        if not os.path.isabs(s):
            s = top.labspath + '/' + s
        return root._lookup_abs(s, Entry)
2182
class FileBuildInfo(SCons.Node.BuildInfoBase):
    """Build ("binfo") information recorded for a File, as written to
    and read back from a .sconsign file."""

    current_version_id = 1

    def convert_to_sconsign(self):
        """
        Converts this FileBuildInfo object for writing to a .sconsign file

        This replaces each Node in our various dependency lists with its
        usual string representation: relative to the top-level SConstruct
        directory, or an absolute path if it's outside.
        """
        if os.sep == '/':
            # POSIX paths are already in the canonical stored form.
            node_to_str = str
        else:
            # Normalize the platform separator to '/' so the stored
            # strings are portable.
            def node_to_str(n):
                try:
                    s = n.path
                except AttributeError:
                    s = str(n)
                else:
                    s = string.replace(s, os.sep, '/')
                return s
        for attr in ['bsources', 'bdepends', 'bimplicit']:
            try:
                val = getattr(self, attr)
            except AttributeError:
                # This entry never recorded that dependency list.
                pass
            else:
                setattr(self, attr, map(node_to_str, val))

    def convert_from_sconsign(self, dir, name):
        """
        Converts a newly-read FileBuildInfo object for in-SCons use

        For normal up-to-date checking, we don't have any conversion to
        perform--but we're leaving this method here to make that clear.
        """
        pass

    def prepare_dependencies(self):
        """
        Prepares a FileBuildInfo object for explaining what changed

        The bsources, bdepends and bimplicit lists have all been
        stored on disk as paths relative to the top-level SConstruct
        directory.  Convert the strings to actual Nodes (for use by the
        --debug=explain code and --implicit-cache).
        """
        attrs = [
            ('bsources', 'bsourcesigs'),
            ('bdepends', 'bdependsigs'),
            ('bimplicit', 'bimplicitsigs'),
        ]
        for (nattr, sattr) in attrs:
            try:
                strings = getattr(self, nattr)
                nodeinfos = getattr(self, sattr)
            except AttributeError:
                continue
            nodes = []
            for s, ni in izip(strings, nodeinfos):
                if not isinstance(s, SCons.Node.Node):
                    # Each NodeInfo knows how to turn its stored path
                    # string back into a Node.
                    s = ni.str_to_node(s)
                nodes.append(s)
            setattr(self, nattr, nodes)

    def format(self, names=0):
        # Human-readable dump: one "dependency: signature-info" line per
        # dependency, then the action signature and action string.
        result = []
        bkids = self.bsources + self.bdepends + self.bimplicit
        bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
        for bkid, bkidsig in izip(bkids, bkidsigs):
            result.append(str(bkid) + ': ' +
                          string.join(bkidsig.format(names=names), ' '))
        result.append('%s [%s]' % (self.bactsig, self.bact))
        return string.join(result, '\n')
2255
class File(Base):
    """A class for files in a file system.
    """

    memoizer_counters = []

    # The NodeInfo/BuildInfo classes used for this node type's
    # .sconsign signature records.
    NodeInfo = FileNodeInfo
    BuildInfo = FileBuildInfo

    # Chunk size for incremental MD5 file signatures, in units of
    # 1024 bytes (the users multiply by 1024; see get_content_hash()
    # and get_cachedir_csig()).
    md5_chunksize = 64
    def diskcheck_match(self):
        # Complain (per the configured diskcheck setting) if a
        # directory exists on disk where this File is expected.
        diskcheck_match(self, self.isdir,
                        "Directory %s found where file expected.")

    def __init__(self, name, directory, fs):
        """Initialize a File node named 'name' within Dir node
        'directory' of file system 'fs'."""
        if __debug__: logInstanceCreation(self, 'Node.FS.File')
        Base.__init__(self, name, directory, fs)
        self._morph()
2275
2276 - def Entry(self, name):
2277 """Create an entry node named 'name' relative to 2278 the directory of this file.""" 2279 return self.dir.Entry(name)
2280
2281 - def Dir(self, name, create=True):
2282 """Create a directory node named 'name' relative to 2283 the directory of this file.""" 2284 return self.dir.Dir(name, create=create)
2285
2286 - def Dirs(self, pathlist):
2287 """Create a list of directories relative to the SConscript 2288 directory of this file.""" 2289 # TODO(1.5) 2290 # return [self.Dir(p) for p in pathlist] 2291 return map(lambda p, s=self: s.Dir(p), pathlist)
2292
2293 - def File(self, name):
2294 """Create a file node named 'name' relative to 2295 the directory of this file.""" 2296 return self.dir.File(name)
2297 2298 #def generate_build_dict(self): 2299 # """Return an appropriate dictionary of values for building 2300 # this File.""" 2301 # return {'Dir' : self.Dir, 2302 # 'File' : self.File, 2303 # 'RDirs' : self.RDirs} 2304
    def _morph(self):
        """Turn a file system node into a File object."""
        self.scanner_paths = {}
        # Preserve any pre-existing "build this file locally" flag.
        if not hasattr(self, '_local'):
            self._local = 0

        # If there was already a Builder set on this entry, then
        # we need to make sure we call the target-decider function,
        # not the source-decider.  Reaching in and doing this by hand
        # is a little bogus.  We'd prefer to handle this by adding
        # an Entry.builder_set() method that disambiguates like the
        # other methods, but that starts running into problems with the
        # fragile way we initialize Dir Nodes with their Mkdir builders,
        # yet still allow them to be overridden by the user.  Since it's
        # not clear right now how to fix that, stick with what works
        # until it becomes clear...
        if self.has_builder():
            self.changed_since_last_build = self.decide_target
2323
    def scanner_key(self):
        # Scanners for File nodes are selected by file suffix.
        return self.get_suffix()
2326
2327 - def get_contents(self):
2328 if not self.rexists(): 2329 return '' 2330 fname = self.rfile().abspath 2331 try: 2332 contents = open(fname, "rb").read() 2333 except EnvironmentError, e: 2334 if not e.filename: 2335 e.filename = fname 2336 raise 2337 return contents
    # get_text_contents() needs the codecs module for the BOM
    # constants; if it's unavailable, fall back to the raw bytes.
    try:
        import codecs
    except ImportError:
        get_text_contents = get_contents
    else:
        # This attempts to figure out what the encoding of the text is
        # based upon the BOM bytes, and then decodes the contents so that
        # it's a valid python string.
        def get_text_contents(self):
            """Return this file's contents as text, decoding UTF-8 or
            UTF-16 content based on a leading BOM."""
            contents = self.get_contents()
            # The behavior of various decode() methods and functions
            # w.r.t. the initial BOM bytes is different for different
            # encodings and/or Python versions.  ('utf-8' does not strip
            # them, but has a 'utf-8-sig' which does; 'utf-16' seems to
            # strip them; etc.)  Just side step all the complication by
            # explicitly stripping the BOM before we decode().
            if contents.startswith(codecs.BOM_UTF8):
                contents = contents[len(codecs.BOM_UTF8):]
                # TODO(2.2): Remove when 2.3 becomes floor.
                #contents = contents.decode('utf-8')
                contents = my_decode(contents, 'utf-8')
            elif contents.startswith(codecs.BOM_UTF16_LE):
                contents = contents[len(codecs.BOM_UTF16_LE):]
                # TODO(2.2): Remove when 2.3 becomes floor.
                #contents = contents.decode('utf-16-le')
                contents = my_decode(contents, 'utf-16-le')
            elif contents.startswith(codecs.BOM_UTF16_BE):
                contents = contents[len(codecs.BOM_UTF16_BE):]
                # TODO(2.2): Remove when 2.3 becomes floor.
                #contents = contents.decode('utf-16-be')
                contents = my_decode(contents, 'utf-16-be')
            return contents
2371
    def get_content_hash(self):
        """
        Compute and return the MD5 hash for this file.

        The file is read incrementally in md5_chunksize*1024-byte
        chunks, so arbitrarily large files are hashed without being
        loaded into memory.  Returns the signature of the empty string
        if the file doesn't exist anywhere; re-raises read errors with
        e.filename filled in.
        """
        if not self.rexists():
            return SCons.Util.MD5signature('')
        fname = self.rfile().abspath
        try:
            cs = SCons.Util.MD5filesignature(fname,
                chunksize=SCons.Node.FS.File.md5_chunksize*1024)
        except EnvironmentError, e:
            if not e.filename:
                e.filename = fname
            raise
        return cs
2387 2388 2389 memoizer_counters.append(SCons.Memoize.CountValue('get_size')) 2390
2391 - def get_size(self):
2392 try: 2393 return self._memo['get_size'] 2394 except KeyError: 2395 pass 2396 2397 if self.rexists(): 2398 size = self.rfile().getsize() 2399 else: 2400 size = 0 2401 2402 self._memo['get_size'] = size 2403 2404 return size
2405 2406 memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp')) 2407
2408 - def get_timestamp(self):
2409 try: 2410 return self._memo['get_timestamp'] 2411 except KeyError: 2412 pass 2413 2414 if self.rexists(): 2415 timestamp = self.rfile().getmtime() 2416 else: 2417 timestamp = 0 2418 2419 self._memo['get_timestamp'] = timestamp 2420 2421 return timestamp
2422
    def store_info(self):
        # Merge our build information into the already-stored entry.
        # This accommodates "chained builds" where a file that's a target
        # in one build (SConstruct file) is a source in a different build.
        # See test/chained-build.py for the use case.
        if do_store_info:
            self.dir.sconsign().store_info(self.name, self)

    # Attributes copied verbatim from an old-format .sconsign entry by
    # convert_old_entry(), below.
    convert_copy_attrs = [
        'bsources',
        'bimplicit',
        'bdepends',
        'bact',
        'bactsig',
        'ninfo',
    ]


    # Old-format "signature list" attributes that convert_old_entry()
    # must translate into lists of NodeInfo objects.
    convert_sig_attrs = [
        'bsourcesigs',
        'bimplicitsigs',
        'bdependsigs',
    ]
2447 - def convert_old_entry(self, old_entry):
2448 # Convert a .sconsign entry from before the Big Signature 2449 # Refactoring, doing what we can to convert its information 2450 # to the new .sconsign entry format. 2451 # 2452 # The old format looked essentially like this: 2453 # 2454 # BuildInfo 2455 # .ninfo (NodeInfo) 2456 # .bsig 2457 # .csig 2458 # .timestamp 2459 # .size 2460 # .bsources 2461 # .bsourcesigs ("signature" list) 2462 # .bdepends 2463 # .bdependsigs ("signature" list) 2464 # .bimplicit 2465 # .bimplicitsigs ("signature" list) 2466 # .bact 2467 # .bactsig 2468 # 2469 # The new format looks like this: 2470 # 2471 # .ninfo (NodeInfo) 2472 # .bsig 2473 # .csig 2474 # .timestamp 2475 # .size 2476 # .binfo (BuildInfo) 2477 # .bsources 2478 # .bsourcesigs (NodeInfo list) 2479 # .bsig 2480 # .csig 2481 # .timestamp 2482 # .size 2483 # .bdepends 2484 # .bdependsigs (NodeInfo list) 2485 # .bsig 2486 # .csig 2487 # .timestamp 2488 # .size 2489 # .bimplicit 2490 # .bimplicitsigs (NodeInfo list) 2491 # .bsig 2492 # .csig 2493 # .timestamp 2494 # .size 2495 # .bact 2496 # .bactsig 2497 # 2498 # The basic idea of the new structure is that a NodeInfo always 2499 # holds all available information about the state of a given Node 2500 # at a certain point in time. The various .b*sigs lists can just 2501 # be a list of pointers to the .ninfo attributes of the different 2502 # dependent nodes, without any copying of information until it's 2503 # time to pickle it for writing out to a .sconsign file. 2504 # 2505 # The complicating issue is that the *old* format only stored one 2506 # "signature" per dependency, based on however the *last* build 2507 # was configured. We don't know from just looking at it whether 2508 # it was a build signature, a content signature, or a timestamp 2509 # "signature". Since we no longer use build signatures, the 2510 # best we can do is look at the length and if it's thirty two, 2511 # assume that it was (or might have been) a content signature. 
2512 # If it was actually a build signature, then it will cause a 2513 # rebuild anyway when it doesn't match the new content signature, 2514 # but that's probably the best we can do. 2515 import SCons.SConsign 2516 new_entry = SCons.SConsign.SConsignEntry() 2517 new_entry.binfo = self.new_binfo() 2518 binfo = new_entry.binfo 2519 for attr in self.convert_copy_attrs: 2520 try: 2521 value = getattr(old_entry, attr) 2522 except AttributeError: 2523 continue 2524 setattr(binfo, attr, value) 2525 delattr(old_entry, attr) 2526 for attr in self.convert_sig_attrs: 2527 try: 2528 sig_list = getattr(old_entry, attr) 2529 except AttributeError: 2530 continue 2531 value = [] 2532 for sig in sig_list: 2533 ninfo = self.new_ninfo() 2534 if len(sig) == 32: 2535 ninfo.csig = sig 2536 else: 2537 ninfo.timestamp = sig 2538 value.append(ninfo) 2539 setattr(binfo, attr, value) 2540 delattr(old_entry, attr) 2541 return new_entry
    memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info'))

    def get_stored_info(self):
        """Return the SConsignEntry stored for this file in its
        directory's .sconsign data, converting old-format entries as
        needed.  A fresh, empty entry is returned when nothing is
        stored.  The result is memoized."""
        try:
            return self._memo['get_stored_info']
        except KeyError:
            pass

        try:
            sconsign_entry = self.dir.sconsign().get_entry(self.name)
        except (KeyError, EnvironmentError):
            # No .sconsign data for this file; start with empty info.
            import SCons.SConsign
            sconsign_entry = SCons.SConsign.SConsignEntry()
            sconsign_entry.binfo = self.new_binfo()
            sconsign_entry.ninfo = self.new_ninfo()
        else:
            if isinstance(sconsign_entry, FileBuildInfo):
                # This is a .sconsign file from before the Big Signature
                # Refactoring; convert it as best we can.
                sconsign_entry = self.convert_old_entry(sconsign_entry)
            try:
                # Build signatures are no longer used; discard any
                # stale stored one.
                delattr(sconsign_entry.ninfo, 'bsig')
            except AttributeError:
                pass

        self._memo['get_stored_info'] = sconsign_entry

        return sconsign_entry
2571
2572 - def get_stored_implicit(self):
2573 binfo = self.get_stored_info().binfo 2574 binfo.prepare_dependencies() 2575 try: return binfo.bimplicit 2576 except AttributeError: return None
2577
2578 - def rel_path(self, other):
2579 return self.dir.rel_path(other)
2580
2581 - def _get_found_includes_key(self, env, scanner, path):
2582 return (id(env), id(scanner), path)
2583 2584 memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key)) 2585
2586 - def get_found_includes(self, env, scanner, path):
2587 """Return the included implicit dependencies in this file. 2588 Cache results so we only scan the file once per path 2589 regardless of how many times this information is requested. 2590 """ 2591 memo_key = (id(env), id(scanner), path) 2592 try: 2593 memo_dict = self._memo['get_found_includes'] 2594 except KeyError: 2595 memo_dict = {} 2596 self._memo['get_found_includes'] = memo_dict 2597 else: 2598 try: 2599 return memo_dict[memo_key] 2600 except KeyError: 2601 pass 2602 2603 if scanner: 2604 # result = [n.disambiguate() for n in scanner(self, env, path)] 2605 result = scanner(self, env, path) 2606 result = map(lambda N: N.disambiguate(), result) 2607 else: 2608 result = [] 2609 2610 memo_dict[memo_key] = result 2611 2612 return result
2613
    def _createDir(self):
        # ensure that the directories for this node are
        # created.
        self.dir._create()

    def push_to_cache(self):
        """Try to push the node into a cache
        """
        # This should get called before the Nodes' .built() method is
        # called, which would clear the build signature if the file has
        # a source scanner.
        #
        # We have to clear the local memoized values *before* we push
        # the node to cache so that the memoization of the self.exists()
        # return value doesn't interfere.
        if self.nocache:
            return
        self.clear_memoized_values()
        if self.exists():
            self.get_build_env().get_CacheDir().push(self)
2634
2635 - def retrieve_from_cache(self):
2636 """Try to retrieve the node's content from a cache 2637 2638 This method is called from multiple threads in a parallel build, 2639 so only do thread safe stuff here. Do thread unsafe stuff in 2640 built(). 2641 2642 Returns true iff the node was successfully retrieved. 2643 """ 2644 if self.nocache: 2645 return None 2646 if not self.is_derived(): 2647 return None 2648 return self.get_build_env().get_CacheDir().retrieve(self)
2649
    def visited(self):
        """Refresh and store this node's signature information
        (csig/timestamp/size) after it has been visited, pushing the
        file to the CacheDir first when so configured."""
        if self.exists():
            self.get_build_env().get_CacheDir().push_if_forced(self)

        ninfo = self.get_ninfo()

        csig = self.get_max_drift_csig()
        if csig:
            ninfo.csig = csig

        ninfo.timestamp = self.get_timestamp()
        ninfo.size = self.get_size()

        if not self.has_builder():
            # This is a source file, but it might have been a target file
            # in another build that included more of the DAG.  Copy
            # any build information that's stored in the .sconsign file
            # into our binfo object so it doesn't get lost.
            old = self.get_stored_info()
            self.get_binfo().__dict__.update(old.binfo.__dict__)

        self.store_info()
2672
    def find_src_builder(self):
        # Find a source code builder for this (not-yet-existing) file:
        # the directory's configured src_builder if there is one,
        # otherwise a transparent SCCS or RCS builder when a matching
        # control file is found on disk.  Returns None if the file
        # already exists somewhere or no builder applies.
        if self.rexists():
            return None
        scb = self.dir.src_builder()
        if scb is _null:
            if diskcheck_sccs(self.dir, self.name):
                scb = get_DefaultSCCSBuilder()
            elif diskcheck_rcs(self.dir, self.name):
                scb = get_DefaultRCSBuilder()
            else:
                scb = None
        if scb is not None:
            # Attach the builder (unless one is already set) so the
            # normal build machinery can use it.
            try:
                b = self.builder
            except AttributeError:
                b = None
            if b is None:
                self.builder_set(scb)
        return scb

    def has_src_builder(self):
        """Return whether this Node has a source builder or not.

        If this Node doesn't have an explicit source code builder, this
        is where we figure out, on the fly, if there's a transparent
        source code builder for it.

        Note that if we found a source builder, we also set the
        self.builder attribute, so that all of the methods that actually
        *build* this file don't have to do anything different.
        """
        try:
            scb = self.sbuilder
        except AttributeError:
            scb = self.sbuilder = self.find_src_builder()
        return scb is not None
2709
    def alter_targets(self):
        """Return any corresponding targets in a variant directory.
        """
        if self.is_derived():
            return [], None
        return self.fs.variant_dir_target_climb(self, self.dir, [self.name])

    def _rmv_existing(self):
        # Remove the existing file (via the Unlink action) so it can
        # be rebuilt, clearing memoized state (e.g. exists()) first.
        self.clear_memoized_values()
        e = Unlink(self, [], None)
        if isinstance(e, SCons.Errors.BuildError):
            raise e

    #
    # Taskmaster interface subsystem
    #

    def make_ready(self):
        # Resolve any transparent source builder and compute the build
        # info before the Taskmaster starts processing this node.
        self.has_src_builder()
        self.get_binfo()
2730
    def prepare(self):
        """Prepare for this file to be created."""
        SCons.Node.Node.prepare(self)

        if self.get_state() != SCons.Node.up_to_date:
            if self.exists():
                # Remove a stale derived file (unless it's precious)
                # before rebuilding it.
                if self.is_derived() and not self.precious:
                    self._rmv_existing()
            else:
                try:
                    self._createDir()
                except SCons.Errors.StopError, drive:
                    # NOTE(review): the caught StopError instance is
                    # formatted as the missing drive/directory in the
                    # message below — confirm against Dir._create().
                    desc = "No drive `%s' for target `%s'." % (drive, self)
                    raise SCons.Errors.StopError, desc
2745 2746 # 2747 # 2748 # 2749
    def remove(self):
        """Remove this file.

        Returns 1 if a file (or symlink) was removed, None if there
        was nothing to remove.
        """
        if self.exists() or self.islink():
            self.fs.unlink(self.path)
            return 1
        return None

    def do_duplicate(self, src):
        # Duplicate source file 'src' into this node's (variant
        # directory) location via the Link action, creating our
        # directory and removing any stale copy first.
        self._createDir()
        Unlink(self, None, None)
        e = Link(self, src, None)
        if isinstance(e, SCons.Errors.BuildError):
            desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
            raise SCons.Errors.StopError, desc
        self.linked = 1
        # The Link() action may or may not have actually
        # created the file, depending on whether the -n
        # option was used or not.  Delete the _exists and
        # _rexists attributes so they can be reevaluated.
        self.clear()
    memoizer_counters.append(SCons.Memoize.CountValue('exists'))

    def exists(self):
        """Return whether this file exists, first duplicating it from
        its source node when this node lives in a duplicating variant
        directory.  The result is memoized."""
        try:
            return self._memo['exists']
        except KeyError:
            pass
        # Duplicate from source path if we are set up to do this.
        if self.duplicate and not self.is_derived() and not self.linked:
            src = self.srcnode()
            if src is not self:
                # At this point, src is meant to be copied in a variant directory.
                src = src.rfile()
                if src.abspath != self.abspath:
                    if src.exists():
                        self.do_duplicate(src)
                        # Can't return 1 here because the duplication might
                        # not actually occur if the -n option is being used.
                    else:
                        # The source file does not exist.  Make sure no old
                        # copy remains in the variant directory.
                        if Base.exists(self) or self.islink():
                            self.fs.unlink(self.path)
                        # Return None explicitly because the Base.exists() call
                        # above will have cached its value if the file existed.
                        self._memo['exists'] = None
                        return None
        result = Base.exists(self)
        self._memo['exists'] = result
        return result
2801 2802 # 2803 # SIGNATURE SUBSYSTEM 2804 # 2805
    def get_max_drift_csig(self):
        """
        Returns the content signature currently stored for this node
        if it's been unmodified longer than the max_drift value, or the
        max_drift value is 0.  Returns None otherwise.
        """
        old = self.get_stored_info()
        mtime = self.get_timestamp()

        max_drift = self.fs.max_drift
        if max_drift > 0:
            if (time.time() - mtime) > max_drift:
                # The file is older than max_drift; trust the stored
                # csig only if its recorded timestamp still matches.
                try:
                    n = old.ninfo
                    if n.timestamp and n.csig and n.timestamp == mtime:
                        return n.csig
                except AttributeError:
                    pass
        elif max_drift == 0:
            # max_drift of 0 means always trust the stored csig.
            try:
                return old.ninfo.csig
            except AttributeError:
                pass

        return None
2831
    def get_csig(self):
        """
        Generate a node's content signature, the digested signature
        of its content.

        node - the node
        cache - alternate node to use for the signature cache
        returns - the content signature
        """
        ninfo = self.get_ninfo()
        try:
            return ninfo.csig
        except AttributeError:
            pass

        csig = self.get_max_drift_csig()
        if csig is None:

            try:
                # Small files are read whole and hashed below; larger
                # files are hashed incrementally.  Both paths should
                # yield the same MD5 digest.  (NOTE(review): the
                # threshold compares against md5_chunksize itself, not
                # md5_chunksize*1024 as used elsewhere — confirm this
                # is intentional.)
                if self.get_size() < SCons.Node.FS.File.md5_chunksize:
                    contents = self.get_contents()
                else:
                    csig = self.get_content_hash()
            except IOError:
                # This can happen if there's actually a directory on-disk,
                # which can be the case if they've disabled disk checks,
                # or if an action with a File target actually happens to
                # create a same-named directory by mistake.
                csig = ''
            else:
                if not csig:
                    csig = SCons.Util.MD5signature(contents)

        ninfo.csig = csig

        return csig
2868 2869 # 2870 # DECISION SUBSYSTEM 2871 # 2872
    def builder_set(self, builder):
        # Attaching a Builder makes this file a target, so switch the
        # decider from decide_source() to decide_target().
        SCons.Node.Node.builder_set(self, builder)
        self.changed_since_last_build = self.decide_target

    def changed_content(self, target, prev_ni):
        """Decide by content signature; "changed" when there is no
        previous csig to compare against."""
        cur_csig = self.get_csig()
        try:
            return cur_csig != prev_ni.csig
        except AttributeError:
            return 1

    def changed_state(self, target, prev_ni):
        # Changed unless this node has already been marked up-to-date.
        return self.state != SCons.Node.up_to_date

    def changed_timestamp_then_content(self, target, prev_ni):
        """Decide by timestamp first; only compare content when the
        timestamp differs.  On a timestamp match, carry the previous
        csig forward so it need not be recomputed."""
        if not self.changed_timestamp_match(target, prev_ni):
            try:
                self.get_ninfo().csig = prev_ni.csig
            except AttributeError:
                pass
            return False
        return self.changed_content(target, prev_ni)

    def changed_timestamp_newer(self, target, prev_ni):
        """Changed when this source is newer than the target
        (Make-style decision)."""
        try:
            return self.get_timestamp() > target.get_timestamp()
        except AttributeError:
            return 1

    def changed_timestamp_match(self, target, prev_ni):
        """Changed when this file's timestamp differs from the one
        recorded in the previous NodeInfo."""
        try:
            return self.get_timestamp() != prev_ni.timestamp
        except AttributeError:
            return 1

    def decide_source(self, target, prev_ni):
        # Defer to the target's construction environment's decider
        # for source files.
        return target.get_build_env().decide_source(self, target, prev_ni)

    def decide_target(self, target, prev_ni):
        # Defer to the target's construction environment's decider
        # for target files.
        return target.get_build_env().decide_target(self, target, prev_ni)

    # Initialize this Node's decider function to decide_source() because
    # every file is a source file until it has a Builder attached...
    changed_since_last_build = decide_source
    def is_up_to_date(self):
        """Return whether this file is up to date, pulling a local
        copy from a Repository if one is wanted and current."""
        T = 0  # set to 1 for Trace() debugging output
        if T: Trace('is_up_to_date(%s):' % self)
        if not self.exists():
            if T: Trace(' not self.exists():')
            # The file doesn't exist locally...
            r = self.rfile()
            if r != self:
                # ...but there is one in a Repository...
                if not self.changed(r):
                    if T: Trace(' changed(%s):' % r)
                    # ...and it's even up-to-date...
                    if self._local:
                        # ...and they'd like a local copy.
                        e = LocalCopy(self, r, None)
                        if isinstance(e, SCons.Errors.BuildError):
                            # NOTE(review): bare 'raise' here has no
                            # active exception being handled —
                            # presumably this should raise e; confirm.
                            raise
                        self.store_info()
                    if T: Trace(' 1\n')
                    return 1
            # Record the "changed" state before reporting out of date.
            self.changed()
            if T: Trace(' None\n')
            return None
        else:
            r = self.changed()
            if T: Trace(' self.exists():  %s\n' % r)
            return not r
    memoizer_counters.append(SCons.Memoize.CountValue('rfile'))

    def rfile(self):
        """Return this file's "repository file": the first suitable
        corresponding Node found in a repository directory, or self
        when the file exists locally (or nowhere).  Memoized."""
        try:
            return self._memo['rfile']
        except KeyError:
            pass
        result = self
        if not self.exists():
            norm_name = _my_normcase(self.name)
            for dir in self.dir.get_all_rdirs():
                try: node = dir.entries[norm_name]
                except KeyError: node = dir.file_on_disk(self.name)
                if node and node.exists() and \
                   (isinstance(node, File) or isinstance(node, Entry) \
                    or not node.is_derived()):
                    result = node
                    # Copy over our local attributes to the repository
                    # Node so we identify shared object files in the
                    # repository and don't assume they're static.
                    #
                    # This isn't perfect; the attribute would ideally
                    # be attached to the object in the repository in
                    # case it was built statically in the repository
                    # and we changed it to shared locally, but that's
                    # rarely the case and would only occur if you
                    # intentionally used the same suffix for both
                    # shared and static objects anyway.  So this
                    # should work well in practice.
                    result.attributes = self.attributes
                    break
        self._memo['rfile'] = result
        return result
2979
    def rstr(self):
        # String form of the repository file (see rfile()).
        return str(self.rfile())

    def get_cachedir_csig(self):
        """
        Fetch a Node's content signature for purposes of computing
        another Node's cachesig.

        This is a wrapper around the normal get_csig() method that handles
        the somewhat obscure case of using CacheDir with the -n option.
        Any files that don't exist would normally be "built" by fetching
        them from the cache, but the normal get_csig() method will try
        to open up the local file, which doesn't exist because the -n
        option meant we didn't actually pull the file from cachedir.
        But since the file *does* actually exist in the cachedir, we
        can use its contents for the csig.
        """
        try:
            return self.cachedir_csig
        except AttributeError:
            pass

        cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
        if not self.exists() and cachefile and os.path.exists(cachefile):
            # Hash the cached copy directly.
            self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \
                SCons.Node.FS.File.md5_chunksize * 1024)
        else:
            self.cachedir_csig = self.get_csig()
        return self.cachedir_csig
3009
3010 - def get_cachedir_bsig(self):
3011 try: 3012 return self.cachesig 3013 except AttributeError: 3014 pass 3015 3016 # Add the path to the cache signature, because multiple 3017 # targets built by the same action will all have the same 3018 # build signature, and we have to differentiate them somehow. 3019 children = self.children() 3020 executor = self.get_executor() 3021 # sigs = [n.get_cachedir_csig() for n in children] 3022 sigs = map(lambda n: n.get_cachedir_csig(), children) 3023 sigs.append(SCons.Util.MD5signature(executor.get_contents())) 3024 sigs.append(self.path) 3025 result = self.cachesig = SCons.Util.MD5collect(sigs) 3026 return result
# The canonical default FS object, created lazily by get_default_fs().
default_fs = None

def get_default_fs():
    # Return the module-wide default FS, creating it on first use.
    global default_fs
    if not default_fs:
        default_fs = FS()
    return default_fs
3036
class FileFinder:
    """
    Finds files in lists of directory Nodes, memoizing results per
    (filename, paths) combination (see find_file(), below).
    """
    if SCons.Memoize.use_memoizer:
        __metaclass__ = SCons.Memoize.Memoized_Metaclass

    memoizer_counters = []

    def __init__(self):
        # Per-instance memoization dictionary.
        self._memo = {}
3047
3048 - def filedir_lookup(self, p, fd=None):
3049 """ 3050 A helper method for find_file() that looks up a directory for 3051 a file we're trying to find. This only creates the Dir Node if 3052 it exists on-disk, since if the directory doesn't exist we know 3053 we won't find any files in it... :-) 3054 3055 It would be more compact to just use this as a nested function 3056 with a default keyword argument (see the commented-out version 3057 below), but that doesn't work unless you have nested scopes, 3058 so we define it here just so this work under Python 1.5.2. 3059 """ 3060 if fd is None: 3061 fd = self.default_filedir 3062 dir, name = os.path.split(fd) 3063 drive, d = os.path.splitdrive(dir) 3064 if not name and d[:1] in ('/', os.sep): 3065 #return p.fs.get_root(drive).dir_on_disk(name) 3066 return p.fs.get_root(drive) 3067 if dir: 3068 p = self.filedir_lookup(p, dir) 3069 if not p: 3070 return None 3071 norm_name = _my_normcase(name) 3072 try: 3073 node = p.entries[norm_name] 3074 except KeyError: 3075 return p.dir_on_disk(name) 3076 if isinstance(node, Dir): 3077 return node 3078 if isinstance(node, Entry): 3079 node.must_be_same(Dir) 3080 return node 3081 return None
3082
3083 - def _find_file_key(self, filename, paths, verbose=None):
3084 return (filename, paths)
3085 3086 memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key)) 3087
3088 - def find_file(self, filename, paths, verbose=None):
3089 """ 3090 find_file(str, [Dir()]) -> [nodes] 3091 3092 filename - a filename to find 3093 paths - a list of directory path *nodes* to search in. Can be 3094 represented as a list, a tuple, or a callable that is 3095 called with no arguments and returns the list or tuple. 3096 3097 returns - the node created from the found file. 3098 3099 Find a node corresponding to either a derived file or a file 3100 that exists already. 3101 3102 Only the first file found is returned, and none is returned 3103 if no file is found. 3104 """ 3105 memo_key = self._find_file_key(filename, paths) 3106 try: 3107 memo_dict = self._memo['find_file'] 3108 except KeyError: 3109 memo_dict = {} 3110 self._memo['find_file'] = memo_dict 3111 else: 3112 try: 3113 return memo_dict[memo_key] 3114 except KeyError: 3115 pass 3116 3117 if verbose and not callable(verbose): 3118 if not SCons.Util.is_String(verbose): 3119 verbose = "find_file" 3120 verbose = ' %s: ' % verbose 3121 verbose = lambda s, v=verbose: sys.stdout.write(v + s) 3122 3123 filedir, filename = os.path.split(filename) 3124 if filedir: 3125 # More compact code that we can't use until we drop 3126 # support for Python 1.5.2: 3127 # 3128 #def filedir_lookup(p, fd=filedir): 3129 # """ 3130 # A helper function that looks up a directory for a file 3131 # we're trying to find. This only creates the Dir Node 3132 # if it exists on-disk, since if the directory doesn't 3133 # exist we know we won't find any files in it... 
:-) 3134 # """ 3135 # dir, name = os.path.split(fd) 3136 # if dir: 3137 # p = filedir_lookup(p, dir) 3138 # if not p: 3139 # return None 3140 # norm_name = _my_normcase(name) 3141 # try: 3142 # node = p.entries[norm_name] 3143 # except KeyError: 3144 # return p.dir_on_disk(name) 3145 # if isinstance(node, Dir): 3146 # return node 3147 # if isinstance(node, Entry): 3148 # node.must_be_same(Dir) 3149 # return node 3150 # if isinstance(node, Dir) or isinstance(node, Entry): 3151 # return node 3152 # return None 3153 #paths = filter(None, map(filedir_lookup, paths)) 3154 3155 self.default_filedir = filedir 3156 paths = filter(None, map(self.filedir_lookup, paths)) 3157 3158 result = None 3159 for dir in paths: 3160 if verbose: 3161 verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) 3162 node, d = dir.srcdir_find_file(filename) 3163 if node: 3164 if verbose: 3165 verbose("... FOUND '%s' in '%s'\n" % (filename, d)) 3166 result = node 3167 break 3168 3169 memo_dict[memo_key] = result 3170 3171 return result
# Module-level convenience binding: a shared FileFinder instance's
# find_file method, usable as SCons.Node.FS.find_file(...).
find_file = FileFinder().find_file
def invalidate_node_memos(targets):
    """
    Invalidate the memoized values of all Nodes (files or directories)
    that are associated with the given entries. Has been added to
    clear the cache of nodes affected by a direct execution of an
    action (e.g.  Delete/Copy/Chmod). Existing Node caches become
    inconsistent if the action is run through Execute().  The argument
    `targets` can be a single Node object or filename, or a sequence
    of Nodes/filenames.
    """
    from traceback import extract_stack

    # First check if the cache really needs to be flushed. Only
    # actions run in the SConscript with Execute() seem to be
    # affected. XXX The way to check if Execute() is in the stacktrace
    # is a very dirty hack and should be replaced by a more sensible
    # solution.
    for f in extract_stack():
        # f is (filename, lineno, funcname, text); look for a call to
        # Execute() made from Environment.py.
        if f[2] == 'Execute' and f[0].endswith('Environment.py'):
            break
    else:
        # Don't have to invalidate, so return
        return

    if not SCons.Util.is_List(targets):
        targets = [targets]

    for entry in targets:
        # If the target is a Node object, clear the cache. If it is a
        # filename, look up potentially existing Node object first.
        try:
            entry.clear_memoized_values()
        except AttributeError:
            # Not a Node object, try to look up Node by filename.  XXX
            # This creates Node objects even for those filenames which
            # do not correspond to an existing Node object.
            node = get_default_fs().Entry(entry)
            if node:
                node.clear_memoized_values()
3215 3216 # Local Variables: 3217 # tab-width:4 3218 # indent-tabs-mode:nil 3219 # End: 3220 # vim: set expandtab tabstop=4 shiftwidth=4: 3221