Package SCons :: Package Node :: Module FS
[hide private]
[frames] | [no frames]

Source Code for Module SCons.Node.FS

   1  """scons.Node.FS 
   2   
   3  File system nodes. 
   4   
   5  These Nodes represent the canonical external objects that people think 
   6  of when they think of building software: files and directories. 
   7   
   8  This holds a "default_fs" variable that should be initialized with an FS 
   9  that can be used by scripts or modules looking for the canonical default. 
  10   
  11  """ 
  12   
  13  # 
  14  # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation 
  15  # 
  16  # Permission is hereby granted, free of charge, to any person obtaining 
  17  # a copy of this software and associated documentation files (the 
  18  # "Software"), to deal in the Software without restriction, including 
  19  # without limitation the rights to use, copy, modify, merge, publish, 
  20  # distribute, sublicense, and/or sell copies of the Software, and to 
  21  # permit persons to whom the Software is furnished to do so, subject to 
  22  # the following conditions: 
  23  # 
  24  # The above copyright notice and this permission notice shall be included 
  25  # in all copies or substantial portions of the Software. 
  26  # 
  27  # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY 
  28  # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 
  29  # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
  30  # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 
  31  # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 
  32  # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 
  33  # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
  34   
  35  __revision__ = "src/engine/SCons/Node/FS.py 5357 2011/09/09 21:31:03 bdeegan" 
  36   
  37  import fnmatch 
  38  import os 
  39  import re 
  40  import shutil 
  41  import stat 
  42  import sys 
  43  import time 
  44  import codecs 
  45   
  46  import SCons.Action 
  47  from SCons.Debug import logInstanceCreation 
  48  import SCons.Errors 
  49  import SCons.Memoize 
  50  import SCons.Node 
  51  import SCons.Node.Alias 
  52  import SCons.Subst 
  53  import SCons.Util 
  54  import SCons.Warnings 
  55   
  56  from SCons.Debug import Trace 
  57   
# Module-level switches; consumed elsewhere in this module (consumers not
# visible in this chunk).
do_store_info = True
print_duplicate = 0
  60   
  61   
class EntryProxyAttributeError(AttributeError):
    """An AttributeError subclass that records the EntryProxy and the
    attribute name involved, so that the error message can identify the
    underlying Entry that triggered the exception."""

    def __init__(self, entry_proxy, attribute):
        AttributeError.__init__(self)
        self.entry_proxy = entry_proxy
        self.attribute = attribute

    def __str__(self):
        # Resolve the proxied entry lazily, at display time.
        entry = self.entry_proxy.get()
        return "%s instance %s has no attribute %s" % (
            entry.__class__.__name__,
            repr(entry.name),
            repr(self.attribute))
# The max_drift value:  by default, use a cached signature value for
# any file that's been untouched for more than two days.
default_max_drift = 2*24*60*60 # seconds (two days)

#
# We stringify these file system Nodes a lot.  Turning a file system Node
# into a string is non-trivial, because the final string representation
# can depend on a lot of factors:  whether it's a derived target or not,
# whether it's linked to a repository or source directory, and whether
# there's duplication going on.  The normal technique for optimizing
# calculations like this is to memoize (cache) the string value, so you
# only have to do the calculation once.
#
# A number of the above factors, however, can be set after we've already
# been asked to return a string for a Node, because a Repository() or
# VariantDir() call or the like may not occur until later in SConscript
# files.  So this variable controls whether we bother trying to save
# string values for Nodes.  The wrapper interface can set this whenever
# they're done mucking with Repository and VariantDir and the other stuff,
# to let this module know it can start returning saved string values
# for Nodes.
#
Save_Strings = None # toggled via save_strings() below
def save_strings(val):
    """Enable (or disable) caching of Node string representations.

    Called by the wrapper interface once Repository()/VariantDir()
    processing is done and stringified paths can no longer change.
    """
    global Save_Strings
    Save_Strings = val
#
# Avoid unnecessary function calls by recording a Boolean value that
# tells us whether or not os.path.splitdrive() actually does anything
# on this system, and therefore whether we need to bother calling it
# when looking up path names in various methods below.
#

# Both are populated by initialize_do_splitdrive() below.
do_splitdrive = None
_my_splitdrive = None
def initialize_do_splitdrive():
    """Set up the platform-dependent drive-splitting globals.

    Populates do_splitdrive, _my_splitdrive, and the path-separator
    convenience globals (OS_SEP, UNC_PREFIX, os_sep_is_slash).
    """
    global do_splitdrive
    global has_unc
    drive, path = os.path.splitdrive('X:/foo')
    # os.path.splitunc() only exists on Windows-ish platforms.
    has_unc = hasattr(os.path, 'splitunc')

    do_splitdrive = not not drive or has_unc

    global _my_splitdrive
    if has_unc:
        def splitdrive(p):
            if p[1:2] == ':':
                return p[:2], p[2:]
            if p[0:2] == '//':
                # Note that we leave a leading slash in the path
                # because UNC paths are always absolute.
                return '//', p[1:]
            return '', p
    else:
        def splitdrive(p):
            if p[1:2] == ':':
                return p[:2], p[2:]
            return '', p
    _my_splitdrive = splitdrive

    # Keep some commonly used values in global variables to skip to
    # module look-up costs.
    global OS_SEP
    global UNC_PREFIX
    global os_sep_is_slash

    OS_SEP = os.sep
    UNC_PREFIX = OS_SEP + OS_SEP
    os_sep_is_slash = OS_SEP == '/'

initialize_do_splitdrive()

# Used to avoid invoking os.path.normpath if not necessary.
needs_normpath_check = re.compile(
    r'''
      # We need to renormalize the path if it contains any consecutive
      # '/' characters.
      .*// |

      # We need to renormalize the path if it contains a '..' directory.
      # Note that we check for all the following cases:
      #
      #   a) The path is a single '..'
      #   b) The path starts with '..'. E.g. '../' or '../moredirs'
      #      but we not match '..abc/'.
      #   c) The path ends with '..'. E.g. '/..' or 'dirs/..'
      #   d) The path contains a '..' in the middle.
      #      E.g. dirs/../moredirs

      (.*/)?\.\.(?:/|$) |

      # We need to renormalize the path if it contains a '.'
      # directory, but NOT if it is a single '.'  '/' characters. We
      # do not want to match a single '.' because this case is checked
      # for explicitely since this is common enough case.
      #
      # Note that we check for all the following cases:
      #
      #   a) We don't match a single '.'
      #   b) We match if the path starts with '.'. E.g. './' or
      #      './moredirs' but we not match '.abc/'.
      #   c) We match if the path ends with '.'. E.g. '/.' or
      #      'dirs/.'
      #   d) We match if the path contains a '.' in the middle.
      #      E.g. dirs/./moredirs

      \./|.*/\.(?:/|$)
    ''',
    re.VERBOSE
    )
needs_normpath_match = needs_normpath_check.match

#
# SCons.Action objects for interacting with the outside world.
196 # 197 # The Node.FS methods in this module should use these actions to 198 # create and/or remove files and directories; they should *not* use 199 # os.{link,symlink,unlink,mkdir}(), etc., directly. 200 # 201 # Using these SCons.Action objects ensures that descriptions of these 202 # external activities are properly displayed, that the displays are 203 # suppressed when the -s (silent) option is used, and (most importantly) 204 # the actions are disabled when the the -n option is used, in which case 205 # there should be *no* changes to the external file system(s)... 206 # 207 208 if hasattr(os, 'link'): 221 else: 222 _hardlink_func = None 223 224 if hasattr(os, 'symlink'): 227 else: 228 _softlink_func = None 229
def _copy_func(fs, src, dest):
    """Duplicate src as dest by copying (metadata-preserving), then force
    dest to be user-writable while keeping src's other permission bits."""
    shutil.copy2(src, dest)
    src_stat = fs.stat(src)
    writable_mode = stat.S_IMODE(src_stat[stat.ST_MODE]) | stat.S_IWRITE
    fs.chmod(dest, writable_mode)
# Valid duplication styles: each is a '-'-separated priority list of
# 'hard' (link), 'soft' (symlink) and 'copy'.
Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy',
                    'hard-copy', 'soft-copy', 'copy']

Link_Funcs = [] # contains the callables of the specified duplication style
def set_duplicate(duplicate):
    """Fill in the Link_Funcs list according to the argument, discarding
    duplication functions not available on this platform.

    Raises SCons.Errors.InternalError if *duplicate* is not one of
    Valid_Duplicates.
    """
    # Set up the dictionary that maps the argument names to the
    # underlying implementations.  We do this inside this function,
    # not in the top-level module code, so that we can remap os.link
    # and os.symlink for testing purposes.
    link_dict = {
        'hard' : _hardlink_func,
        'soft' : _softlink_func,
        'copy' : _copy_func
    }

    # Idiom fix: "duplicate not in" instead of "not duplicate in".
    if duplicate not in Valid_Duplicates:
        raise SCons.Errors.InternalError("The argument of set_duplicate "
                                         "should be in Valid_Duplicates")
    global Link_Funcs
    Link_Funcs = []
    for func in duplicate.split('-'):
        if link_dict[func]:
            Link_Funcs.append(link_dict[func])
263
def LinkFunc(target, source, env):
    """Duplicate source[0] as target[0] using the configured Link_Funcs.

    Relative paths cause problems with symbolic links, so we use
    absolute paths, which may be a problem for people who want to move
    their soft-linked src-trees around.  Those people should use the
    'hard-copy' mode; softlinks cannot be used for that.
    """
    src = source[0].abspath
    dest = target[0].abspath
    # Fix: the original bound "dir, file = os.path.split(dest)", shadowing
    # two builtins and never using the file part.
    dest_dir = os.path.dirname(dest)
    if dest_dir and not target[0].fs.isdir(dest_dir):
        os.makedirs(dest_dir)
    if not Link_Funcs:
        # Set a default order of link functions.
        set_duplicate('hard-soft-copy')
    fs = source[0].fs
    # Now link the files with the previously specified order.
    for func in Link_Funcs:
        try:
            func(fs, src, dest)
            break
        except (IOError, OSError):
            # An OSError indicates something happened like a permissions
            # problem or an attempt to symlink across file-system
            # boundaries.  An IOError indicates something like the file
            # not existing.  In either case, keep trying additional
            # functions in the list and only raise an error if the last
            # one failed.
            if func == Link_Funcs[-1]:
                # Exceptions from the last link method (copy) are fatal.
                raise
    return 0
# Action wrapper so duplication is displayed/suppressed per -s/-n options.
Link = SCons.Action.Action(LinkFunc, None)
def LocalString(target, source, env):
    """Return the display string for a LocalCopy action."""
    fmt = 'Local copy of %s from %s'
    return fmt % (target[0], source[0])
# Same as Link, but prints "Local copy of ..." when executed.
LocalCopy = SCons.Action.Action(LinkFunc, LocalString)
def UnlinkFunc(target, source, env):
    """Remove target[0] through its Node's file system object."""
    node = target[0]
    node.fs.unlink(node.abspath)
    return 0
# Action used to remove a file via the Node's file system object.
Unlink = SCons.Action.Action(UnlinkFunc, None)
def MkdirFunc(target, source, env):
    """Create the directory for target[0] unless it already exists."""
    node = target[0]
    if not node.exists():
        node.fs.mkdir(node.abspath)
    return 0
# Directory-creation action; presub output suppressed.
Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)

# Singleton created lazily by get_MkdirBuilder().
MkdirBuilder = None
def get_MkdirBuilder():
    """Return the singleton Builder for directory nodes, creating it on
    first use (imports are deferred until then)."""
    global MkdirBuilder
    if MkdirBuilder is not None:
        return MkdirBuilder
    import SCons.Builder
    import SCons.Defaults
    # "env" will get filled in by Executor.get_build_env()
    # calling SCons.Defaults.DefaultEnvironment() when necessary.
    MkdirBuilder = SCons.Builder.Builder(action=Mkdir,
                                         env=None,
                                         explain=None,
                                         is_explicit=None,
                                         target_scanner=SCons.Defaults.DirEntryScanner,
                                         name="MkdirBuilder")
    return MkdirBuilder
333
class _Null(object):
    """Sentinel type used to distinguish "no value supplied" from None."""
    pass
# Shared sentinel instance of _Null.
_null = _Null()

# Lazily-created singletons; see get_DefaultSCCSBuilder()/get_DefaultRCSBuilder().
DefaultSCCSBuilder = None
DefaultRCSBuilder = None
def get_DefaultSCCSBuilder():
    """Return the singleton Builder that checks files out of SCCS,
    creating it on first use."""
    global DefaultSCCSBuilder
    if DefaultSCCSBuilder is not None:
        return DefaultSCCSBuilder
    import SCons.Builder
    # "env" will get filled in by Executor.get_build_env()
    # calling SCons.Defaults.DefaultEnvironment() when necessary.
    act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
    DefaultSCCSBuilder = SCons.Builder.Builder(action=act,
                                               env=None,
                                               name="DefaultSCCSBuilder")
    return DefaultSCCSBuilder
353
def get_DefaultRCSBuilder():
    """Return the singleton Builder that checks files out of RCS,
    creating it on first use."""
    global DefaultRCSBuilder
    if DefaultRCSBuilder is not None:
        return DefaultRCSBuilder
    import SCons.Builder
    # "env" will get filled in by Executor.get_build_env()
    # calling SCons.Defaults.DefaultEnvironment() when necessary.
    act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
    DefaultRCSBuilder = SCons.Builder.Builder(action=act,
                                              env=None,
                                              name="DefaultRCSBuilder")
    return DefaultRCSBuilder
# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
_is_cygwin = sys.platform == "cygwin"
# On a case-preserving (case-sensitive) platform os.path.normcase() leaves
# its argument unchanged, so identity is a safe, cheap normalization.
# (The original compared against os.path.normpath("TeSt"), which is just a
# roundabout way of writing the literal "TeSt".)
if os.path.normcase("TeSt") == "TeSt" and not _is_cygwin:
    def _my_normcase(x):
        return x
else:
    def _my_normcase(x):
        return x.upper()
374 375 376
class DiskChecker(object):
    """A toggleable on-disk consistency check.

    The instance is callable and dispatches to either the real check
    ('do') or a no-op ('ignore'); set() selects which, based on whether
    this checker's type name appears in the supplied list.
    """

    def __init__(self, type, do, ignore):
        # NOTE(review): 'type' shadows the builtin; kept for interface
        # compatibility with existing callers.
        self.type = type
        self.do = do
        self.ignore = ignore
        # Checks are enabled by default.
        self.func = do

    def __call__(self, *args, **kw):
        return self.func(*args, **kw)

    def set(self, list):
        # NOTE(review): 'list' shadows the builtin; kept for interface
        # compatibility.
        self.func = self.do if self.type in list else self.ignore
def do_diskcheck_match(node, predicate, errorfmt):
    """Evaluate *predicate* and raise TypeError(errorfmt % node.abspath)
    if it matched, first discarding any None stat() value the predicate
    may have cached on the node."""
    matched = predicate()
    # If calling the predicate() cached a None value from stat(),
    # remove it so it doesn't interfere with later attempts to
    # build this Node as we walk the DAG.  (This isn't a great way
    # to do this -- we're reaching into an interface that doesn't
    # really belong to us -- but it's all about performance.)
    memo = getattr(node, '_memo', None)
    try:
        if memo is not None and memo['stat'] is None:
            del memo['stat']
    except KeyError:
        pass
    if matched:
        raise TypeError(errorfmt % node.abspath)
406
def ignore_diskcheck_match(node, predicate, errorfmt):
    """No-op stand-in for do_diskcheck_match() when 'match' checking is
    disabled."""
    pass
409
def do_diskcheck_rcs(node, name):
    """Return whether an RCS version file ("name,v") exists for *name*
    under node's RCS subdirectory; the RCS directory lookup is cached on
    the node as 'rcs_dir'."""
    try:
        rcs_dir = node.rcs_dir
    except AttributeError:
        rcs_dir = node.Dir('RCS') if node.entry_exists_on_disk('RCS') else None
        node.rcs_dir = rcs_dir
    if rcs_dir:
        return rcs_dir.entry_exists_on_disk(name + ',v')
    return None
422
def ignore_diskcheck_rcs(node, name):
    """No-op stand-in for do_diskcheck_rcs() when 'rcs' checking is
    disabled."""
    return None
425
def do_diskcheck_sccs(node, name):
    """Return whether an SCCS file ("s.name") exists for *name* under
    node's SCCS subdirectory; the SCCS directory lookup is cached on the
    node as 'sccs_dir'."""
    try:
        sccs_dir = node.sccs_dir
    except AttributeError:
        sccs_dir = node.Dir('SCCS') if node.entry_exists_on_disk('SCCS') else None
        node.sccs_dir = sccs_dir
    if sccs_dir:
        return sccs_dir.entry_exists_on_disk('s.' + name)
    return None
438
def ignore_diskcheck_sccs(node, name):
    """No-op stand-in for do_diskcheck_sccs() when 'sccs' checking is
    disabled."""
    return None
# Toggleable disk-check singletons; enabled/disabled via set_diskcheck().
diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)

diskcheckers = [
    diskcheck_match,
    diskcheck_rcs,
    diskcheck_sccs,
]
def set_diskcheck(list):
    """Enable exactly the disk checkers whose type names appear in *list*;
    all others are switched to their no-op implementation."""
    for dc in diskcheckers:
        dc.set(list)
455
def diskcheck_types():
    """Return the type names of all registered disk checkers."""
    return [dc.type for dc in diskcheckers]
458 459 460
class EntryProxy(SCons.Util.Proxy):
    """Proxy wrapped around a Node.FS entry that exposes the "special"
    substitution attributes (${TARGET.abspath}, ${SOURCE.posix}, etc.)
    in addition to the entry's own attributes."""

    __str__ = SCons.Util.Delegate('__str__')

    def __get_abspath(self):
        # ${NODE.abspath}: the entry's absolute path.
        entry = self.get()
        return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
                                             entry.name + "_abspath")

    def __get_filebase(self):
        # ${NODE.filebase}: file name without its suffix.
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
                                             name + "_filebase")

    def __get_suffix(self):
        # ${NODE.suffix}: the file name's suffix.
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
                                             name + "_suffix")

    def __get_file(self):
        # ${NODE.file}: the file name without any directory part.
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(name, name + "_file")

    def __get_base_path(self):
        """Return the file's directory and file name, with the
        suffix stripped."""
        entry = self.get()
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
                                             entry.name + "_base")

    def __get_posix_path(self):
        """Return the path with / as the path separator,
        regardless of platform."""
        if os_sep_is_slash:
            return self
        else:
            entry = self.get()
            r = entry.get_path().replace(OS_SEP, '/')
            return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")

    def __get_windows_path(self):
        """Return the path with \ as the path separator,
        regardless of platform."""
        if OS_SEP == '\\':
            return self
        else:
            entry = self.get()
            r = entry.get_path().replace(OS_SEP, '\\')
            return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")

    def __get_srcnode(self):
        # ${NODE.srcpath}: the linked source-side node.
        return EntryProxy(self.get().srcnode())

    def __get_srcdir(self):
        """Returns the directory containing the source node linked to this
        node via VariantDir(), or the directory of this node if not linked."""
        return EntryProxy(self.get().srcnode().dir)

    def __get_rsrcnode(self):
        # ${NODE.rsrcpath}: the source node's repository file.
        return EntryProxy(self.get().srcnode().rfile())

    def __get_rsrcdir(self):
        """Returns the directory containing the source node linked to this
        node via VariantDir(), or the directory of this node if not linked."""
        return EntryProxy(self.get().srcnode().rfile().dir)

    def __get_dir(self):
        # ${NODE.dir}: the entry's parent directory.
        return EntryProxy(self.get().dir)

    # Maps each special attribute name to the accessor implementing it.
    dictSpecialAttrs = { "base"     : __get_base_path,
                         "posix"    : __get_posix_path,
                         "windows"  : __get_windows_path,
                         "win32"    : __get_windows_path,
                         "srcpath"  : __get_srcnode,
                         "srcdir"   : __get_srcdir,
                         "dir"      : __get_dir,
                         "abspath"  : __get_abspath,
                         "filebase" : __get_filebase,
                         "suffix"   : __get_suffix,
                         "file"     : __get_file,
                         "rsrcpath" : __get_rsrcnode,
                         "rsrcdir"  : __get_rsrcdir,
                       }

    def __getattr__(self, name):
        # This is how we implement the "special" attributes
        # such as base, posix, srcdir, etc.
        try:
            attr_function = self.dictSpecialAttrs[name]
        except KeyError:
            try:
                attr = SCons.Util.Proxy.__getattr__(self, name)
            except AttributeError:
                # Fix: the original read "except AttributeError, e:",
                # the Python 2-only syntax (a SyntaxError on Python 3,
                # which the rest of this file targets -- see sys.intern),
                # and the bound name was never used.  Raise our own
                # AttributeError subclass whose __str__() identifies the
                # name of the entry that caused the exception.
                raise EntryProxyAttributeError(self, name)
            return attr
        else:
            return attr_function(self)
561
class Base(SCons.Node.Node):
    """A generic class for file system entries.  This class is for
    when we don't know yet whether the entry being looked up is a file
    or a directory.  Instances of this class can morph into either
    Dir or File objects by a later, more precise lookup.

    Note: this class does not define __cmp__ and __hash__ for
    efficiency reasons.  SCons does a lot of comparing of
    Node.FS.{Base,Entry,File,Dir} objects, so those operations must be
    as fast as possible, which means we want to use Python's built-in
    object identity comparisons.
    """

    # Per-class list of SCons.Memoize counters; entries are appended
    # next to the methods they instrument.
    memoizer_counters = []
    def __init__(self, name, directory, fs):
        """Initialize a generic Node.FS.Base object.

        Call the superclass initialization, take care of setting up
        our relative and absolute paths, identify our parent
        directory, and indicate that this node should use
        signatures."""
        if __debug__: logInstanceCreation(self, 'Node.FS.Base')
        SCons.Node.Node.__init__(self)

        # Filenames and paths are probably reused and are intern'ed to
        # save some memory.

        #: Filename with extension as it was specified when the object was
        #: created; to obtain filesystem path, use Python str() function
        self.name = SCons.Util.silent_intern(name)
        #: Cached filename extension
        self.suffix = SCons.Util.silent_intern(SCons.Util.splitext(name)[1])
        self.fs = fs #: Reference to parent Node.FS object

        assert directory, "A directory must be provided"

        self.abspath = SCons.Util.silent_intern(directory.entry_abspath(name))
        self.labspath = SCons.Util.silent_intern(directory.entry_labspath(name))
        # Avoid a useless './' prefix when the parent is the top directory.
        if directory.path == '.':
            self.path = SCons.Util.silent_intern(name)
        else:
            self.path = SCons.Util.silent_intern(directory.entry_path(name))
        if directory.tpath == '.':
            self.tpath = SCons.Util.silent_intern(name)
        else:
            self.tpath = SCons.Util.silent_intern(directory.entry_tpath(name))
        self.path_elements = directory.path_elements + [self]

        self.dir = directory
        self.cwd = None # will hold the SConscript directory for target nodes
        self.duplicate = directory.duplicate
614
615 - def str_for_display(self):
616 return '"' + self.__str__() + '"'
617
618 - def must_be_same(self, klass):
619 """ 620 This node, which already existed, is being looked up as the 621 specified klass. Raise an exception if it isn't. 622 """ 623 if isinstance(self, klass) or klass is Entry: 624 return 625 raise TypeError("Tried to lookup %s '%s' as a %s." %\ 626 (self.__class__.__name__, self.path, klass.__name__))
627
    def get_dir(self):
        """Return the Node for this entry's parent directory."""
        return self.dir
630
    def get_suffix(self):
        """Return the cached filename extension (set in __init__)."""
        return self.suffix
633
    def rfile(self):
        """A generic entry is its own repository file; subclasses
        override this with a real repository lookup."""
        return self
636
637 - def __str__(self):
638 """A Node.FS.Base object's string representation is its path 639 name.""" 640 global Save_Strings 641 if Save_Strings: 642 return self._save_str() 643 return self._get_str()
644 645 memoizer_counters.append(SCons.Memoize.CountValue('_save_str')) 646
647 - def _save_str(self):
648 try: 649 return self._memo['_save_str'] 650 except KeyError: 651 pass 652 result = sys.intern(self._get_str()) 653 self._memo['_save_str'] = result 654 return result
655
656 - def _get_str(self):
657 global Save_Strings 658 if self.duplicate or self.is_derived(): 659 return self.get_path() 660 srcnode = self.srcnode() 661 if srcnode.stat() is None and self.stat() is not None: 662 result = self.get_path() 663 else: 664 result = srcnode.get_path() 665 if not Save_Strings: 666 # We're not at the point where we're saving the string 667 # representations of FS Nodes (because we haven't finished 668 # reading the SConscript files and need to have str() return 669 # things relative to them). That also means we can't yet 670 # cache values returned (or not returned) by stat(), since 671 # Python code in the SConscript files might still create 672 # or otherwise affect the on-disk file. So get rid of the 673 # values that the underlying stat() method saved. 674 try: del self._memo['stat'] 675 except KeyError: pass 676 if self is not srcnode: 677 try: del srcnode._memo['stat'] 678 except KeyError: pass 679 return result
680 681 rstr = __str__ 682 683 memoizer_counters.append(SCons.Memoize.CountValue('stat')) 684
685 - def stat(self):
686 try: return self._memo['stat'] 687 except KeyError: pass 688 try: result = self.fs.stat(self.abspath) 689 except os.error: result = None 690 self._memo['stat'] = result 691 return result
692
    def exists(self):
        """Return True if something exists on disk at this node's path."""
        return self.stat() is not None
695
    def rexists(self):
        """Return True if this node's repository file (rfile()) exists."""
        return self.rfile().exists()
698
699 - def getmtime(self):
700 st = self.stat() 701 if st: return st[stat.ST_MTIME] 702 else: return None
703
704 - def getsize(self):
705 st = self.stat() 706 if st: return st[stat.ST_SIZE] 707 else: return None
708
709 - def isdir(self):
710 st = self.stat() 711 return st is not None and stat.S_ISDIR(st[stat.ST_MODE])
712
713 - def isfile(self):
714 st = self.stat() 715 return st is not None and stat.S_ISREG(st[stat.ST_MODE])
716 717 if hasattr(os, 'symlink'): 722 else: 725
726 - def is_under(self, dir):
727 if self is dir: 728 return 1 729 else: 730 return self.dir.is_under(dir)
731
    def set_local(self):
        """Set the flag marking this node as local."""
        self._local = 1
734
735 - def srcnode(self):
736 """If this node is in a build path, return the node 737 corresponding to its source file. Otherwise, return 738 ourself. 739 """ 740 srcdir_list = self.dir.srcdir_list() 741 if srcdir_list: 742 srcnode = srcdir_list[0].Entry(self.name) 743 srcnode.must_be_same(self.__class__) 744 return srcnode 745 return self
746
747 - def get_path(self, dir=None):
748 """Return path relative to the current working directory of the 749 Node.FS.Base object that owns us.""" 750 if not dir: 751 dir = self.fs.getcwd() 752 if self == dir: 753 return '.' 754 path_elems = self.path_elements 755 pathname = '' 756 try: i = path_elems.index(dir) 757 except ValueError: 758 for p in path_elems[:-1]: 759 pathname += p.dirname 760 else: 761 for p in path_elems[i+1:-1]: 762 pathname += p.dirname 763 return pathname + path_elems[-1].name
764
    def set_src_builder(self, builder):
        """Set the source code builder for this node."""
        self.sbuilder = builder
        # Also adopt it as the regular builder if none is set yet.
        if not self.has_builder():
            self.builder_set(builder)
770
    def src_builder(self):
        """Fetch the source code builder for this node.

        If there isn't one, we cache the source code builder specified
        for the directory (which in turn will cache the value from its
        parent directory, and so on up to the file system root).
        """
        try:
            scb = self.sbuilder
        except AttributeError:
            # Not cached yet; ask the parent directory and remember it.
            scb = self.dir.src_builder()
            self.sbuilder = scb
        return scb
784
    def get_abspath(self):
        """Get the absolute path of the file."""
        # Computed and interned once, in __init__().
        return self.abspath
788
    def for_signature(self):
        """Return just our name for signature purposes.

        Even an absolute path would not work, because that can change
        thanks to symlinks or remapped network paths.
        """
        return self.name
794
795 - def get_subst_proxy(self):
796 try: 797 return self._proxy 798 except AttributeError: 799 ret = EntryProxy(self) 800 self._proxy = ret 801 return ret
802
    def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
        """
        Generates a target entry that corresponds to this entry (usually
        a source file) with the specified prefix and suffix.

        Note that this method can be overridden dynamically for generated
        files that need different behavior.  See Tool/swig.py for
        an example.
        """
        return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
814
    def _Rfindalldirs_key(self, pathlist):
        # The pathlist tuple itself is the memoization key for
        # Rfindalldirs().
        return pathlist

    memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key))
820 - def Rfindalldirs(self, pathlist):
821 """ 822 Return all of the directories for a given path list, including 823 corresponding "backing" directories in any repositories. 824 825 The Node lookups are relative to this Node (typically a 826 directory), so memoizing result saves cycles from looking 827 up the same path for each target in a given directory. 828 """ 829 try: 830 memo_dict = self._memo['Rfindalldirs'] 831 except KeyError: 832 memo_dict = {} 833 self._memo['Rfindalldirs'] = memo_dict 834 else: 835 try: 836 return memo_dict[pathlist] 837 except KeyError: 838 pass 839 840 create_dir_relative_to_self = self.Dir 841 result = [] 842 for path in pathlist: 843 if isinstance(path, SCons.Node.Node): 844 result.append(path) 845 else: 846 dir = create_dir_relative_to_self(path) 847 result.extend(dir.get_all_rdirs()) 848 849 memo_dict[pathlist] = result 850 851 return result
852
    def RDirs(self, pathlist):
        """Search for a list of directories in the Repository list."""
        # Lookups are relative to the SConscript directory so the memoized
        # result can be shared by all targets in that directory.
        cwd = self.cwd or self.fs._cwd
        return cwd.Rfindalldirs(pathlist)
    memoizer_counters.append(SCons.Memoize.CountValue('rentry'))

    def rentry(self):
        """Return this node's "repository" entry: the first repository
        directory with an on-disk entry of this name, or self when this
        node already exists.  The result is memoized."""
        try:
            return self._memo['rentry']
        except KeyError:
            pass
        result = self
        if not self.exists():
            norm_name = _my_normcase(self.name)
            for dir in self.dir.get_all_rdirs():
                try:
                    node = dir.entries[norm_name]
                except KeyError:
                    if dir.entry_exists_on_disk(self.name):
                        result = dir.Entry(self.name)
                        break
                # NOTE(review): when an already-instantiated entry is found
                # in dir.entries, 'node' is bound but unused and the search
                # continues -- confirm this is intentional.
        self._memo['rentry'] = result
        return result
877
    def _glob1(self, pattern, ondisk=True, source=False, strings=False):
        # Generic entries never match glob patterns; subclasses override.
        return []
880
class Entry(Base):
    """This is the class for generic Node.FS entries--that is, things
    that could be a File or a Dir, but we're just not sure yet.
    Consequently, the methods in this class really exist just to
    transform their associated object into the right class when the
    time comes, and then call the same-named method in the transformed
    class."""

    def diskcheck_match(self):
        # Generic entries have nothing to check; File/Dir override this.
        pass

    def disambiguate(self, must_exist=None):
        """Morph this generic Entry into a Dir or File node.

        Checks the on-disk state (and the linked source directory) to
        decide which class this entry should become; defaults to File
        when nothing exists on disk, unless must_exist is set, in
        which case a UserError is raised.
        """
        if self.isdir():
            self.__class__ = Dir
            self._morph()
        elif self.isfile():
            self.__class__ = File
            self._morph()
            self.clear()
        else:
            # There was nothing on-disk at this location, so look in
            # the src directory.
            #
            # We can't just use self.srcnode() straight away because
            # that would create an actual Node for this file in the src
            # directory, and there might not be one.  Instead, use the
            # dir_on_disk() method to see if there's something on-disk
            # with that name, in which case we can go ahead and call
            # self.srcnode() to create the right type of entry.
            srcdir = self.dir.srcnode()
            if srcdir != self.dir and \
               srcdir.entry_exists_on_disk(self.name) and \
               self.srcnode().isdir():
                self.__class__ = Dir
                self._morph()
            elif must_exist:
                msg = "No such file or directory: '%s'" % self.abspath
                raise SCons.Errors.UserError(msg)
            else:
                self.__class__ = File
                self._morph()
                self.clear()
        return self

    def rfile(self):
        """We're a generic Entry, but the caller is actually looking for
        a File at this point, so morph into one."""
        self.__class__ = File
        self._morph()
        self.clear()
        return File.rfile(self)

    def scanner_key(self):
        # Scanners are selected by file suffix.
        return self.get_suffix()

    def get_contents(self):
        """Fetch the contents of the entry.  Returns the exact binary
        contents of the file."""
        try:
            self = self.disambiguate(must_exist=1)
        except SCons.Errors.UserError:
            # There was nothing on disk with which to disambiguate
            # this entry.  Leave it as an Entry, but return a null
            # string so calls to get_contents() in emitters and the
            # like (e.g. in qt.py) don't have to disambiguate by hand
            # or catch the exception.
            return ''
        else:
            # 'self' is a Dir or File now, so this is not recursive.
            return self.get_contents()

    def get_text_contents(self):
        """Fetch the decoded text contents of a Unicode encoded Entry.

        Since this should return the text contents from the file
        system, we check to see into what sort of subclass we should
        morph this Entry."""
        try:
            self = self.disambiguate(must_exist=1)
        except SCons.Errors.UserError:
            # There was nothing on disk with which to disambiguate
            # this entry.  Leave it as an Entry, but return a null
            # string so calls to get_text_contents() in emitters and
            # the like (e.g. in qt.py) don't have to disambiguate by
            # hand or catch the exception.
            return ''
        else:
            return self.get_text_contents()

    def must_be_same(self, klass):
        """Called to make sure a Node is a Dir.  Since we're an
        Entry, we can morph into one."""
        if self.__class__ is not klass:
            self.__class__ = klass
            self._morph()
            self.clear()

    # The following methods can get called before the Taskmaster has
    # had a chance to call disambiguate() directly to see if this Entry
    # should really be a Dir or a File.  We therefore use these to call
    # disambiguate() transparently (from our caller's point of view).
    #
    # Right now, this minimal set of methods has been derived by just
    # looking at some of the methods that will obviously be called early
    # in any of the various Taskmasters' calling sequences, and then
    # empirically figuring out which additional methods are necessary
    # to make various tests pass.

    def exists(self):
        """Return if the Entry exists.  Check the file system to see
        what we should turn into first.  Assume a file if there's no
        directory."""
        return self.disambiguate().exists()

    def rel_path(self, other):
        # A still-generic Entry cannot compute a relative path.
        d = self.disambiguate()
        if d.__class__ is Entry:
            raise Exception("rel_path() could not disambiguate File/Dir")
        return d.rel_path(other)

    def new_ninfo(self):
        return self.disambiguate().new_ninfo()

    def changed_since_last_build(self, target, prev_ni):
        return self.disambiguate().changed_since_last_build(target, prev_ni)

    def _glob1(self, pattern, ondisk=True, source=False, strings=False):
        return self.disambiguate()._glob1(pattern, ondisk, source, strings)

    def get_subst_proxy(self):
        return self.disambiguate().get_subst_proxy()
# Keep a module-level alias so later code can still refer to the Entry
# class after the name 'Entry' is reused for the FS.Entry() factory method.
_classEntry = Entry
class LocalFS(object):

    if SCons.Memoize.use_memoizer:
        __metaclass__ = SCons.Memoize.Memoized_Metaclass

    # This class implements an abstraction layer for operations involving
    # a local file system.  Essentially, this wraps any function in
    # the os, os.path or shutil modules that we use to actually go do
    # anything with or to the local file system.
    #
    # Note that there's a very good chance we'll refactor this part of
    # the architecture in some way as we really implement the interface(s)
    # for remote file system Nodes.  For example, the right architecture
    # might be to have this be a subclass instead of a base class.
    # Nevertheless, we're using this as a first step in that direction.
    #
    # We're not using chdir() yet because the calling subclass method
    # needs to use os.chdir() directly to avoid recursion.  Will we
    # really need this one?
    #def chdir(self, path):
    #    return os.chdir(path)
    def chmod(self, path, mode):
        """Change the permission bits of path; thin wrapper over os.chmod()."""
        return os.chmod(path, mode)
    def copy(self, src, dst):
        """Copy file data and mode bits; thin wrapper over shutil.copy()."""
        return shutil.copy(src, dst)
    def copy2(self, src, dst):
        """Copy data plus metadata (mtime etc.); wraps shutil.copy2()."""
        return shutil.copy2(src, dst)
    def exists(self, path):
        """Return whether path exists on disk; wraps os.path.exists()."""
        return os.path.exists(path)
    def getmtime(self, path):
        """Return path's modification time; wraps os.path.getmtime()."""
        return os.path.getmtime(path)
    def getsize(self, path):
        """Return path's size in bytes; wraps os.path.getsize()."""
        return os.path.getsize(path)
    def isdir(self, path):
        """Return whether path is a directory; wraps os.path.isdir()."""
        return os.path.isdir(path)
    def isfile(self, path):
        """Return whether path is a regular file; wraps os.path.isfile()."""
        return os.path.isfile(path)
    def lstat(self, path):
        """Stat path without following symlinks; wraps os.lstat()."""
        return os.lstat(path)
    def listdir(self, path):
        """Return the entries of directory path; wraps os.listdir()."""
        return os.listdir(path)
    def makedirs(self, path):
        """Create path plus any missing parents; wraps os.makedirs()."""
        return os.makedirs(path)
    def mkdir(self, path):
        """Create a single directory; wraps os.mkdir()."""
        return os.mkdir(path)
    def rename(self, old, new):
        """Rename old to new; wraps os.rename()."""
        return os.rename(old, new)
    def stat(self, path):
        """Stat path, following symlinks; wraps os.stat()."""
        return os.stat(path)
    def open(self, path):
        """Open path for reading in text mode; wraps the builtin open()."""
        return open(path)
1076 1077 if hasattr(os, 'symlink'): 1080 else: 1083 1084 if hasattr(os, 'readlink'): 1087 else:
1090 1091 1092 #class RemoteFS: 1093 # # Skeleton for the obvious methods we might need from the 1094 # # abstraction layer for a remote filesystem. 1095 # def upload(self, local_src, remote_dst): 1096 # pass 1097 # def download(self, remote_src, local_dst): 1098 # pass 1099 1100
class FS(LocalFS):

    # Per-class registry of SCons.Memoize counters; entries are appended
    # next to the methods they instrument.
    memoizer_counters = []
    def __init__(self, path = None):
        """Initialize the Node.FS subsystem.

        The supplied path is the top of the source tree, where we
        expect to find the top-level build file.  If no path is
        supplied, the current directory is the default.

        The path argument must be a valid absolute path.
        """
        if __debug__: logInstanceCreation(self, 'Node.FS')

        # Memoization cache used by the memoizer_counters machinery.
        self._memo = {}

        # Map of drive string -> RootDir node; see get_root().
        self.Root = {}
        self.SConstruct_dir = None
        self.max_drift = default_max_drift

        self.Top = None
        if path is None:
            self.pathTop = os.getcwd()
        else:
            self.pathTop = path
        self.defaultDrive = _my_normcase(_my_splitdrive(self.pathTop)[0])

        self.Top = self.Dir(self.pathTop)
        self.Top.path = '.'
        self.Top.tpath = '.'
        self._cwd = self.Top

        # Let the NodeInfo classes resolve str_to_node() through this FS.
        DirNodeInfo.fs = self
        FileNodeInfo.fs = self
1136
    def set_SConstruct_dir(self, dir):
        """Record the directory containing the SConstruct file."""
        self.SConstruct_dir = dir
1139
    def get_max_drift(self):
        """Return the maximum allowed clock drift (seconds) for timestamps."""
        return self.max_drift
1142
    def set_max_drift(self, max_drift):
        """Set the maximum allowed clock drift (seconds) for timestamps."""
        self.max_drift = max_drift
1145
1146 - def getcwd(self):
1147 if hasattr(self, "_cwd"): 1148 return self._cwd 1149 else: 1150 return "<no cwd>"
1151
    def chdir(self, dir, change_os_dir=0):
        """Change the current working directory for lookups.
        If change_os_dir is true, we will also change the "real" cwd
        to match.
        """
        curr=self._cwd
        try:
            if dir is not None:
                self._cwd = dir
                if change_os_dir:
                    os.chdir(dir.abspath)
        except OSError:
            # os.chdir() failed, so restore the in-memory lookup cwd to
            # keep it consistent with the real one, then re-raise.
            self._cwd = curr
            raise
1166
    def get_root(self, drive):
        """
        Returns the root directory for the specified drive, creating
        it if necessary.
        """
        drive = _my_normcase(drive)
        try:
            return self.Root[drive]
        except KeyError:
            root = RootDir(drive, self)
            self.Root[drive] = root
            # Alias the empty drive and the default drive to the same
            # RootDir so '' and e.g. 'c:' lookups agree.
            if not drive:
                self.Root[self.defaultDrive] = root
            elif drive == self.defaultDrive:
                self.Root[''] = root
            return root
1183
    def _lookup(self, p, directory, fsclass, create=1):
        """
        The generic entry point for Node lookup with user-supplied data.

        This translates arbitrary input into a canonical Node.FS object
        of the specified fsclass.  The general approach for strings is
        to turn it into a fully normalized absolute path and then call
        the root directory's lookup_abs() method for the heavy lifting.

        If the path name begins with '#', it is unconditionally
        interpreted relative to the top-level directory of this FS.  '#'
        is treated as a synonym for the top-level SConstruct directory,
        much like '~' is treated as a synonym for the user's home
        directory in a UNIX shell.  So both '#foo' and '#/foo' refer
        to the 'foo' subdirectory underneath the top-level SConstruct
        directory.

        If the path name is relative, then the path is looked up relative
        to the specified directory, or the current directory (self._cwd,
        typically the SConscript directory) if the specified directory
        is None.
        """
        if isinstance(p, Base):
            # It's already a Node.FS object.  Make sure it's the right
            # class and return.
            p.must_be_same(fsclass)
            return p
        # str(p) in case it's something like a proxy object
        p = str(p)

        if not os_sep_is_slash:
            p = p.replace(OS_SEP, '/')

        if p[0:1] == '#':
            # There was an initial '#', so we strip it and override
            # whatever directory they may have specified with the
            # top-level SConstruct directory.
            p = p[1:]
            directory = self.Top

            # There might be a drive letter following the
            # '#'. Although it is not described in the SCons man page,
            # the regression test suite explicitly tests for that
            # syntax. It seems to mean the following thing:
            #
            #   Assuming that the SCons top dir is in C:/xxx/yyy,
            #   '#X:/toto' means X:/xxx/yyy/toto.
            #
            # i.e. it assumes that the X: drive has a directory
            # structure similar to the one found on drive C:.
            if do_splitdrive:
                drive, p = _my_splitdrive(p)
                if drive:
                    root = self.get_root(drive)
                else:
                    root = directory.root
            else:
                root = directory.root

            # We can only strip trailing slashes after splitting the drive
            # since the drive might be the UNC '//' prefix.
            p = p.strip('/')

            needs_normpath = needs_normpath_match(p)

            # The path is relative to the top-level SCons directory.
            if p in ('', '.'):
                p = directory.labspath
            else:
                p = directory.labspath + '/' + p
        else:
            if do_splitdrive:
                drive, p = _my_splitdrive(p)
                if drive and not p:
                    # This causes a naked drive letter to be treated
                    # as a synonym for the root directory on that
                    # drive.
                    p = '/'
            else:
                drive = ''

            # We can only strip trailing '/' since the drive might be the
            # UNC '//' prefix.
            if p != '/':
                p = p.rstrip('/')

            needs_normpath = needs_normpath_match(p)

            if p[0:1] == '/':
                # Absolute path
                root = self.get_root(drive)
            else:
                # This is a relative lookup or to the current directory
                # (the path name is not absolute).  Add the string to the
                # appropriate directory lookup path, after which the whole
                # thing gets normalized.
                if directory:
                    if not isinstance(directory, Dir):
                        directory = self.Dir(directory)
                else:
                    directory = self._cwd

                if p in ('', '.'):
                    p = directory.labspath
                else:
                    p = directory.labspath + '/' + p

                if drive:
                    root = self.get_root(drive)
                else:
                    root = directory.root

        if needs_normpath is not None:
            # Normalize a pathname. Will return the same result for
            # equivalent paths.
            #
            # We take advantage of the fact that we have an absolute
            # path here for sure. In addition, we know that the
            # components of lookup path are separated by slashes at
            # this point. Because of this, this code is about 2X
            # faster than calling os.path.normpath() followed by
            # replacing os.sep with '/' again.
            ins = p.split('/')[1:]
            outs = []
            for d in ins:
                if d == '..':
                    try:
                        outs.pop()
                    except IndexError:
                        pass
                elif d not in ('', '.'):
                    outs.append(d)
            p = '/' + '/'.join(outs)

        return root._lookup_abs(p, fsclass, create)
1319
    def Entry(self, name, directory = None, create = 1):
        """Look up or create a generic Entry node with the specified name.
        If the name is a relative path (begins with ./, ../, or a file
        name), then it is looked up relative to the supplied directory
        node, or to the top level directory of the FS (supplied at
        construction time) if no directory is supplied.
        """
        return self._lookup(name, directory, Entry, create)
1328
    def File(self, name, directory = None, create = 1):
        """Look up or create a File node with the specified name.  If
        the name is a relative path (begins with ./, ../, or a file name),
        then it is looked up relative to the supplied directory node,
        or to the top level directory of the FS (supplied at construction
        time) if no directory is supplied.

        This method will raise TypeError if a directory is found at the
        specified path.
        """
        return self._lookup(name, directory, File, create)
1340
    def Dir(self, name, directory = None, create = True):
        """Look up or create a Dir node with the specified name.  If
        the name is a relative path (begins with ./, ../, or a file name),
        then it is looked up relative to the supplied directory node,
        or to the top level directory of the FS (supplied at construction
        time) if no directory is supplied.

        This method will raise TypeError if a normal file is found at the
        specified path.
        """
        return self._lookup(name, directory, Dir, create)
1352
    def VariantDir(self, variant_dir, src_dir, duplicate=1):
        """Link the supplied variant directory to the source directory
        for purposes of building files.

        Raises a UserError for reflection (source under variant dir) or
        an attempt to give a variant dir a second source directory.
        """

        if not isinstance(src_dir, SCons.Node.Node):
            src_dir = self.Dir(src_dir)
        if not isinstance(variant_dir, SCons.Node.Node):
            variant_dir = self.Dir(variant_dir)
        if src_dir.is_under(variant_dir):
            raise SCons.Errors.UserError("Source directory cannot be under variant directory.")
        if variant_dir.srcdir:
            if variant_dir.srcdir == src_dir:
                return # We already did this.
            raise SCons.Errors.UserError("'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir))
        variant_dir.link(src_dir, duplicate)
1368
1369 - def Repository(self, *dirs):
1370 """Specify Repository directories to search.""" 1371 for d in dirs: 1372 if not isinstance(d, SCons.Node.Node): 1373 d = self.Dir(d) 1374 self.Top.addRepository(d)
1375
    def variant_dir_target_climb(self, orig, dir, tail):
        """Create targets in corresponding variant directories

        Climb the directory tree, and look up path names
        relative to any linked variant directories we find.

        Even though this loops and walks up the tree, we don't memoize
        the return value because this is really only used to process
        the command-line targets.
        """
        targets = []
        message = None
        fmt = "building associated VariantDir targets: %s"
        start_dir = dir
        while dir:
            for bd in dir.variant_dirs:
                if start_dir.is_under(bd):
                    # If already in the build-dir location, don't reflect
                    return [orig], fmt % str(orig)
                p = os.path.join(bd.path, *tail)
                targets.append(self.Entry(p))
            # Accumulate the path components walked so far, so the join
            # above stays relative to each variant dir we discover.
            tail = [dir.name] + tail
            dir = dir.up()
        if targets:
            message = fmt % ' '.join(map(str, targets))
        return targets, message
1402
    def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
        """
        Globs

        This is mainly a shim layer:  it resolves the working directory
        (defaulting to the current lookup cwd) and delegates the actual
        pattern matching to that Dir node's glob() method.
        """
        if cwd is None:
            cwd = self.getcwd()
        return cwd.glob(pathname, ondisk, source, strings)
1412
class DirNodeInfo(SCons.Node.NodeInfoBase):
    # This should get reset by the FS initialization.
    current_version_id = 1

    fs = None

    def str_to_node(self, s):
        """Convert a path string (e.g. read back from a .sconsign file)
        into an Entry node, resolved relative to the top of this FS."""
        top = self.fs.Top
        root = top.root
        if do_splitdrive:
            drive, s = _my_splitdrive(s)
            if drive:
                root = self.fs.get_root(drive)
        if not os.path.isabs(s):
            s = top.labspath + '/' + s
        return root._lookup_abs(s, Entry)
1429
class DirBuildInfo(SCons.Node.BuildInfoBase):
    # Bump this when the pickled build-info format for Dir nodes changes.
    current_version_id = 1
# Pre-compiled pattern matching any glob wildcard character: *, ? or [.
glob_magic_check = re.compile('[*?[]')

def has_glob_magic(s):
    """Return True if the string s contains any glob wildcard characters."""
    return bool(glob_magic_check.search(s))
1437
class Dir(Base):
    """A class for directories in a file system.
    """

    # Per-class registry of SCons.Memoize counters; entries are appended
    # next to the methods they instrument.
    memoizer_counters = []

    NodeInfo = DirNodeInfo
    BuildInfo = DirBuildInfo
    def __init__(self, name, directory, fs):
        """Initialize a directory Node named 'name' under the Node
        'directory' in the file system 'fs', then morph into a fully
        set-up Dir."""
        if __debug__: logInstanceCreation(self, 'Node.FS.Dir')
        Base.__init__(self, name, directory, fs)
        self._morph()
1451
    def _morph(self):
        """Turn a file system Node (either a freshly initialized directory
        object or a separate Entry object) into a proper directory object.

        Set up this directory's entries and hook it into the file
        system tree.  Specify that directories (this Node) don't use
        signatures for calculating whether they're current.
        """

        self.repositories = []
        self.srcdir = None

        self.entries = {}
        self.entries['.'] = self
        self.entries['..'] = self.dir
        self.cwd = self
        self.searched = 0
        self._sconsign = None
        self.variant_dirs = []
        self.root = self.dir.root

        # For directories, we make a difference between the directory
        # 'name' and the directory 'dirname'. The 'name' attribute is
        # used when we need to print the 'name' of the directory or
        # when we it is used as the last part of a path. The 'dirname'
        # is used when the directory is not the last element of the
        # path. The main reason for making that distinction is that
        # for RootDir's the dirname can not be easily inferred from
        # the name. For example, we have to add a '/' after a drive
        # letter but not after a UNC path prefix ('//').
        self.dirname = self.name + OS_SEP

        # Don't just reset the executor, replace its action list,
        # because it might have some pre-or post-actions that need to
        # be preserved.
        #
        # But don't reset the executor if there is a non-null executor
        # attached already. The existing executor might have other
        # targets, in which case replacing the action list with a
        # Mkdir action is a big mistake.
        if not hasattr(self, 'executor'):
            self.builder = get_MkdirBuilder()
            self.get_executor().set_action_list(self.builder.action)
        else:
            # Prepend MkdirBuilder action to existing action list
            l = self.get_executor().action_list
            a = get_MkdirBuilder().action
            l.insert(0, a)
            self.get_executor().set_action_list(l)
1501
    def diskcheck_match(self):
        """Raise an error if a regular file occupies this Node's path."""
        diskcheck_match(self, self.isfile,
                        "File %s found where directory expected.")
1505
    def __clearRepositoryCache(self, duplicate=None):
        """Called when we change the repository(ies) for a directory.
        This clears any cached information that is invalidated by changing
        the repository."""

        for node in self.entries.values():
            if node != self.dir:
                if node != self and isinstance(node, Dir):
                    # Recurse into subdirectories; their caches depend on
                    # the repository list too.
                    node.__clearRepositoryCache(duplicate)
                else:
                    node.clear()
                    try:
                        del node._srcreps
                    except AttributeError:
                        pass
                    if duplicate is not None:
                        node.duplicate=duplicate
1523
1524 - def __resetDuplicate(self, node):
1525 if node != self: 1526 node.duplicate = node.get_dir().duplicate
1527
    def Entry(self, name):
        """
        Looks up or creates an entry node named 'name' relative to
        this directory.  Delegates to the owning FS object.
        """
        return self.fs.Entry(name, self)
1534
    def Dir(self, name, create=True):
        """
        Looks up or creates a directory node named 'name' relative to
        this directory.  Delegates to the owning FS object.
        """
        return self.fs.Dir(name, self, create)
1541
    def File(self, name):
        """
        Looks up or creates a file node named 'name' relative to
        this directory.  Delegates to the owning FS object.
        """
        return self.fs.File(name, self)
1548 1556
1557 - def getRepositories(self):
1558 """Returns a list of repositories for this directory. 1559 """ 1560 if self.srcdir and not self.duplicate: 1561 return self.srcdir.get_all_rdirs() + self.repositories 1562 return self.repositories
    memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs'))

    def get_all_rdirs(self):
        """Return this directory followed by every corresponding
        repository directory, walking up the tree.  Memoized."""
        try:
            return list(self._memo['get_all_rdirs'])
        except KeyError:
            pass

        result = [self]
        fname = '.'
        dir = self
        while dir:
            for rep in dir.getRepositories():
                result.append(rep.Dir(fname))
            # Build the path of self relative to each ancestor as we climb.
            if fname == '.':
                fname = dir.name
            else:
                fname = dir.name + OS_SEP + fname
            dir = dir.up()

        # Store a copy so callers can't mutate the memoized list.
        self._memo['get_all_rdirs'] = list(result)

        return result
1587
    def addRepository(self, dir):
        """Add 'dir' as a repository for this directory and invalidate
        any caches that depend on the repository list."""
        if dir != self and not dir in self.repositories:
            self.repositories.append(dir)
            # A repository dir is addressed as '.' relative to itself.
            dir.tpath = '.'
            self.__clearRepositoryCache()
1593
    def up(self):
        """Return the parent directory Node (None only at the root)."""
        return self.dir
1596
    def _rel_path_key(self, other):
        """Memoization key function for rel_path()."""
        return str(other)
    memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))

    def rel_path(self, other):
        """Return a path to "other" relative to this directory.
        """

        # This complicated and expensive method, which constructs relative
        # paths between arbitrary Node.FS objects, is no longer used
        # by SCons itself.  It was introduced to store dependency paths
        # in .sconsign files relative to the target, but that ended up
        # being significantly inefficient.
        #
        # We're continuing to support the method because some SConstruct
        # files out there started using it when it was available, and
        # we're all about backwards compatibility..

        try:
            memo_dict = self._memo['rel_path']
        except KeyError:
            memo_dict = {}
            self._memo['rel_path'] = memo_dict
        else:
            try:
                return memo_dict[other]
            except KeyError:
                pass

        if self is other:
            result = '.'

        elif not other in self.path_elements:
            # 'other' is not on the path from the root down to us, so
            # build the result from the relative path of its directory.
            try:
                other_dir = other.get_dir()
            except AttributeError:
                result = str(other)
            else:
                if other_dir is None:
                    result = other.name
                else:
                    dir_rel_path = self.rel_path(other_dir)
                    if dir_rel_path == '.':
                        result = other.name
                    else:
                        result = dir_rel_path + OS_SEP + other.name
        else:
            # 'other' is one of our ancestors: climb up with '..' entries.
            i = self.path_elements.index(other) + 1

            path_elems = ['..'] * (len(self.path_elements) - i) \
                         + [n.name for n in other.path_elements[i:]]

            result = OS_SEP.join(path_elems)

        memo_dict[other] = result

        return result
1655
1656 - def get_env_scanner(self, env, kw={}):
1657 import SCons.Defaults 1658 return SCons.Defaults.DirEntryScanner
1659
    def get_target_scanner(self):
        """Return the scanner used when this Dir is itself a target."""
        import SCons.Defaults
        return SCons.Defaults.DirEntryScanner
1663
    def get_found_includes(self, env, scanner, path):
        """Return this directory's implicit dependencies.

        We don't bother caching the results because the scan typically
        shouldn't be requested more than once (as opposed to scanning
        .h file contents, which can be requested as many times as the
        files is #included by other files).
        """
        if not scanner:
            return []
        # Clear cached info for this Dir.  If we already visited this
        # directory on our walk down the tree (because we didn't know at
        # that point it was being used as the source for another Node)
        # then we may have calculated build signature before realizing
        # we had to scan the disk.  Now that we have to, though, we need
        # to invalidate the old calculated signature so that any node
        # dependent on our directory structure gets one that includes
        # info about everything on disk.
        self.clear()
        return scanner(self, env, path)
1684 1685 # 1686 # Taskmaster interface subsystem 1687 # 1688
    def prepare(self):
        """Directories need no pre-build preparation (Taskmaster hook)."""
        pass
1691
    def build(self, **kw):
        """A null "builder" for directories."""
        global MkdirBuilder
        # Only build when a real (non-default) builder was attached;
        # the implicit MkdirBuilder case is handled by _create().
        if self.builder is not MkdirBuilder:
            SCons.Node.Node.build(self, **kw)
1697 1698 # 1699 # 1700 # 1701
    def _create(self):
        """Create this directory, silently and without worrying about
        whether the builder is the default or not."""
        # Collect the chain of missing ancestor directories, deepest last.
        listDirs = []
        parent = self
        while parent:
            if parent.exists():
                break
            listDirs.append(parent)
            p = parent.up()
            if p is None:
                # Don't use while: - else: for this condition because
                # if so, then parent is None and has no .path attribute.
                raise SCons.Errors.StopError(parent.path)
            parent = p
        listDirs.reverse()
        for dirnode in listDirs:
            try:
                # Don't call dirnode.build(), call the base Node method
                # directly because we definitely *must* create this
                # directory.  The dirnode.build() method will suppress
                # the build if it's the default builder.
                SCons.Node.Node.build(dirnode)
                dirnode.get_executor().nullify()
                # The build() action may or may not have actually
                # created the directory, depending on whether the -n
                # option was used or not.  Delete the _exists and
                # _rexists attributes so they can be reevaluated.
                dirnode.clear()
            except OSError:
                pass
1733
1735 global MkdirBuilder 1736 return self.builder is not MkdirBuilder and self.has_builder()
1737
    def alter_targets(self):
        """Return any corresponding targets in a variant directory.
        """
        return self.fs.variant_dir_target_climb(self, self, [])
1742
    def scanner_key(self):
        """A directory does not get scanned."""
        return None
1746
    def get_text_contents(self):
        """We already emit things in text, so just return the binary
        version."""
        return self.get_contents()
1751
1752 - def get_contents(self):
1753 """Return content signatures and names of all our children 1754 separated by new-lines. Ensure that the nodes are sorted.""" 1755 contents = [] 1756 for node in sorted(self.children(), key=lambda t: t.name): 1757 contents.append('%s %s\n' % (node.get_csig(), node.name)) 1758 return ''.join(contents)
1759
    def get_csig(self):
        """Compute the content signature for Directory nodes.  In
        general, this is not needed and the content signature is not
        stored in the DirNodeInfo.  However, if get_contents on a Dir
        node is called which has a child directory, the child
        directory should return the hash of its contents."""
        contents = self.get_contents()
        return SCons.Util.MD5signature(contents)
1768
    def do_duplicate(self, src):
        """Directories are never duplicated from a source dir (no-op)."""
        pass

    # Directories decide "changed" by Node state, not content signatures.
    changed_since_last_build = SCons.Node.Node.state_has_changed
1774 - def is_up_to_date(self):
1775 """If any child is not up-to-date, then this directory isn't, 1776 either.""" 1777 if self.builder is not MkdirBuilder and not self.exists(): 1778 return 0 1779 up_to_date = SCons.Node.up_to_date 1780 for kid in self.children(): 1781 if kid.get_state() > up_to_date: 1782 return 0 1783 return 1
1784
    def rdir(self):
        """Return this directory, or its first existing counterpart in a
        repository when this one does not exist on disk."""
        if not self.exists():
            norm_name = _my_normcase(self.name)
            for dir in self.dir.get_all_rdirs():
                try: node = dir.entries[norm_name]
                except KeyError: node = dir.dir_on_disk(self.name)
                if node and node.exists() and \
                    (isinstance(dir, Dir) or isinstance(dir, Entry)):
                        return node
        return self
1795
    def sconsign(self):
        """Return the .sconsign file info for this directory,
        creating it first if necessary."""
        if not self._sconsign:
            # Deferred import to avoid a circular dependency at load time.
            import SCons.SConsign
            self._sconsign = SCons.SConsign.ForDirectory(self)
        return self._sconsign
1803
    def srcnode(self):
        """Dir has a special need for srcnode()...if we
        have a srcdir attribute set, then that *is* our srcnode."""
        if self.srcdir:
            return self.srcdir
        return Base.srcnode(self)
1810
1811 - def get_timestamp(self):
1812 """Return the latest timestamp from among our children""" 1813 stamp = 0 1814 for kid in self.children(): 1815 if kid.get_timestamp() > stamp: 1816 stamp = kid.get_timestamp() 1817 return stamp
1818
    def entry_abspath(self, name):
        """Return the absolute path of entry 'name' in this directory."""
        return self.abspath + OS_SEP + name
1821
1822 - def entry_labspath(self, name):
1823 return self.labspath + '/' + name
1824
    def entry_path(self, name):
        """Return the (top-relative) path of entry 'name' in this dir."""
        return self.path + OS_SEP + name
1827
    def entry_tpath(self, name):
        """Return the target path of entry 'name' in this directory."""
        return self.tpath + OS_SEP + name
1830
    def entry_exists_on_disk(self, name):
        """Return whether an entry called 'name' exists on disk in this
        directory, using a cached os.listdir() snapshot."""
        try:
            d = self.on_disk_entries
        except AttributeError:
            # First call: build and cache the set of on-disk entry names.
            d = {}
            try:
                entries = os.listdir(self.abspath)
            except OSError:
                pass
            else:
                for entry in map(_my_normcase, entries):
                    d[entry] = True
            self.on_disk_entries = d
        if sys.platform == 'win32':
            name = _my_normcase(name)
            result = d.get(name)
            if result is None:
                # Belt-and-suspenders for Windows:  check directly for
                # 8.3 file names that don't show up in os.listdir().
                result = os.path.exists(self.abspath + OS_SEP + name)
                d[name] = result
            return result
        else:
            return name in d
    memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list'))

    def srcdir_list(self):
        """Return the list of linked source directories for this
        directory, accumulated up the tree.  Memoized."""
        try:
            return self._memo['srcdir_list']
        except KeyError:
            pass

        result = []

        dirname = '.'
        dir = self
        while dir:
            if dir.srcdir:
                result.append(dir.srcdir.Dir(dirname))
            # Track our path relative to each ancestor as we climb.
            dirname = dir.name + OS_SEP + dirname
            dir = dir.up()

        self._memo['srcdir_list'] = result

        return result
1877
    def srcdir_duplicate(self, name):
        """Find entry 'name' in a linked source directory and, when
        duplication is enabled, duplicate it into this directory.
        Returns the local or source node, or None if not found."""
        for dir in self.srcdir_list():
            if self.is_under(dir):
                # We shouldn't source from something in the build path;
                # variant_dir is probably under src_dir, in which case
                # we are reflecting.
                break
            if dir.entry_exists_on_disk(name):
                srcnode = dir.Entry(name).disambiguate()
                if self.duplicate:
                    node = self.Entry(name).disambiguate()
                    node.do_duplicate(srcnode)
                    return node
                else:
                    return srcnode
        return None
1894
    def _srcdir_find_file_key(self, filename):
        """Memoization key function for srcdir_find_file()."""
        return filename
    memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key))

    def srcdir_find_file(self, filename):
        """Search this directory, its repositories, and its linked source
        directories (and their repositories) for 'filename'.

        Returns a (node, directory) tuple, or (None, None) when the file
        is not found anywhere.  Memoized per filename.
        """
        try:
            memo_dict = self._memo['srcdir_find_file']
        except KeyError:
            memo_dict = {}
            self._memo['srcdir_find_file'] = memo_dict
        else:
            try:
                return memo_dict[filename]
            except KeyError:
                pass

        def func(node):
            # Accept only File/Entry nodes that are built or exist on disk.
            if (isinstance(node, File) or isinstance(node, Entry)) and \
               (node.is_derived() or node.exists()):
                return node
            return None

        norm_name = _my_normcase(filename)

        for rdir in self.get_all_rdirs():
            try: node = rdir.entries[norm_name]
            except KeyError: node = rdir.file_on_disk(filename)
            else: node = func(node)
            if node:
                result = (node, self)
                memo_dict[filename] = result
                return result

        for srcdir in self.srcdir_list():
            for rdir in srcdir.get_all_rdirs():
                try: node = rdir.entries[norm_name]
                except KeyError: node = rdir.file_on_disk(filename)
                else: node = func(node)
                if node:
                    # Found in a source dir: return a local File node so
                    # the build happens in this (variant) directory.
                    result = (File(filename, self, self.fs), srcdir)
                    memo_dict[filename] = result
                    return result

        result = (None, None)
        memo_dict[filename] = result
        return result
1942
    def dir_on_disk(self, name):
        """Return a Dir node for 'name' if a directory exists there (on
        disk or via srcdir duplication); None when it's a file."""
        if self.entry_exists_on_disk(name):
            # TypeError means a non-directory occupies the path.
            try: return self.Dir(name)
            except TypeError: pass
        node = self.srcdir_duplicate(name)
        if isinstance(node, File):
            return None
        return node
1951
    def file_on_disk(self, name):
        """Return a File node for 'name' if a file exists there (on disk,
        under RCS/SCCS, or via srcdir duplication); None when it's a
        directory."""
        if self.entry_exists_on_disk(name) or \
           diskcheck_rcs(self, name) or \
           diskcheck_sccs(self, name):
            # TypeError means a directory occupies the path.
            try: return self.File(name)
            except TypeError: pass
        node = self.srcdir_duplicate(name)
        if isinstance(node, Dir):
            return None
        return node
1962
1963 - def walk(self, func, arg):
1964 """ 1965 Walk this directory tree by calling the specified function 1966 for each directory in the tree. 1967 1968 This behaves like the os.path.walk() function, but for in-memory 1969 Node.FS.Dir objects. The function takes the same arguments as 1970 the functions passed to os.path.walk(): 1971 1972 func(arg, dirname, fnames) 1973 1974 Except that "dirname" will actually be the directory *Node*, 1975 not the string. The '.' and '..' entries are excluded from 1976 fnames. The fnames list may be modified in-place to filter the 1977 subdirectories visited or otherwise impose a specific order. 1978 The "arg" argument is always passed to func() and may be used 1979 in any way (or ignored, passing None is common). 1980 """ 1981 entries = self.entries 1982 names = list(entries.keys()) 1983 names.remove('.') 1984 names.remove('..') 1985 func(arg, self, names) 1986 for dirname in [n for n in names if isinstance(entries[n], Dir)]: 1987 entries[dirname].walk(func, arg)
1988
1989 - def glob(self, pathname, ondisk=True, source=False, strings=False):
1990 """ 1991 Returns a list of Nodes (or strings) matching a specified 1992 pathname pattern. 1993 1994 Pathname patterns follow UNIX shell semantics: * matches 1995 any-length strings of any characters, ? matches any character, 1996 and [] can enclose lists or ranges of characters. Matches do 1997 not span directory separators. 1998 1999 The matches take into account Repositories, returning local 2000 Nodes if a corresponding entry exists in a Repository (either 2001 an in-memory Node or something on disk). 2002 2003 By defafult, the glob() function matches entries that exist 2004 on-disk, in addition to in-memory Nodes. Setting the "ondisk" 2005 argument to False (or some other non-true value) causes the glob() 2006 function to only match in-memory Nodes. The default behavior is 2007 to return both the on-disk and in-memory Nodes. 2008 2009 The "source" argument, when true, specifies that corresponding 2010 source Nodes must be returned if you're globbing in a build 2011 directory (initialized with VariantDir()). The default behavior 2012 is to return Nodes local to the VariantDir(). 2013 2014 The "strings" argument, when true, returns the matches as strings, 2015 not Nodes. The strings are path names relative to this directory. 2016 2017 The underlying algorithm is adapted from the glob.glob() function 2018 in the Python library (but heavily modified), and uses fnmatch() 2019 under the covers. 2020 """ 2021 dirname, basename = os.path.split(pathname) 2022 if not dirname: 2023 return sorted(self._glob1(basename, ondisk, source, strings), 2024 key=lambda t: str(t)) 2025 if has_glob_magic(dirname): 2026 list = self.glob(dirname, ondisk, source, strings=False) 2027 else: 2028 list = [self.Dir(dirname, create=True)] 2029 result = [] 2030 for dir in list: 2031 r = dir._glob1(basename, ondisk, source, strings) 2032 if strings: 2033 r = [os.path.join(str(dir), x) for x in r] 2034 result.extend(r) 2035 return sorted(result, key=lambda a: str(a))
2036
    def _glob1(self, pattern, ondisk=True, source=False, strings=False):
        """
        Globs for and returns a list of entry names matching a single
        pattern in this directory.

        This searches any repositories and source directories for
        corresponding entries and returns a Node (or string) relative
        to the current directory if an entry is found anywhere.

        TODO: handle pattern with no wildcard
        """
        # Search this directory plus all of its repository and source
        # (VariantDir) counterparts.
        search_dir_list = self.get_all_rdirs()
        for srcdir in self.srcdir_list():
            search_dir_list.extend(srcdir.get_all_rdirs())

        selfEntry = self.Entry
        names = []
        for dir in search_dir_list:
            # We use the .name attribute from the Node because the keys of
            # the dir.entries dictionary are normalized (that is, all upper
            # case) on case-insensitive systems like Windows.
            node_names = [ v.name for k, v in dir.entries.items()
                           if k not in ('.', '..') ]
            names.extend(node_names)
            if not strings:
                # Make sure the working directory (self) actually has
                # entries for all Nodes in repositories or variant dirs.
                for name in node_names: selfEntry(name)
            if ondisk:
                try:
                    disk_names = os.listdir(dir.abspath)
                except os.error:
                    # Directory doesn't exist on disk; skip it.
                    continue
                names.extend(disk_names)
                if not strings:
                    # We're going to return corresponding Nodes in
                    # the local directory, so we need to make sure
                    # those Nodes exist.  We only want to create
                    # Nodes for the entries that will match the
                    # specified pattern, though, which means we
                    # need to filter the list here, even though
                    # the overall list will also be filtered later,
                    # after we exit this loop.
                    if pattern[0] != '.':
                        #disk_names = [ d for d in disk_names if d[0] != '.' ]
                        disk_names = [x for x in disk_names if x[0] != '.']
                    disk_names = fnmatch.filter(disk_names, pattern)
                    dirEntry = dir.Entry
                    for name in disk_names:
                        # Add './' before disk filename so that '#' at
                        # beginning of filename isn't interpreted.
                        name = './' + name
                        node = dirEntry(name).disambiguate()
                        n = selfEntry(name)
                        if n.__class__ != node.__class__:
                            n.__class__ = node.__class__
                            n._morph()

        # De-duplicate, apply the hidden-file convention, then match.
        names = set(names)
        if pattern[0] != '.':
            #names = [ n for n in names if n[0] != '.' ]
            names = [x for x in names if x[0] != '.']
        names = fnmatch.filter(names, pattern)

        if strings:
            return names

        #return [ self.entries[_my_normcase(n)] for n in names ]
        return [self.entries[_my_normcase(n)] for n in names]
2106
class RootDir(Dir):
    """A class for the root directory of a file system.

    This is the same as a Dir class, except that the path separator
    ('/' or '\\') is actually part of the name, so we don't need to
    add a separator when creating the path names of entries within
    this directory.
    """
    def __init__(self, drive, fs):
        if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
        # We're going to be our own parent directory (".." entry and .dir
        # attribute) so we have to set up some values so Base.__init__()
        # won't gag when it calls some of our methods.
        self.abspath = ''
        self.labspath = ''
        self.path = ''
        self.tpath = ''
        self.path_elements = []
        self.duplicate = 0
        self.root = self

        # Handle all the types of drives:
        if drive == '':
            # No drive, regular UNIX root or Windows default drive.
            name = OS_SEP
            dirname = OS_SEP
        elif drive == '//':
            # UNC path
            name = UNC_PREFIX
            dirname = UNC_PREFIX
        else:
            # Windows drive letter
            name = drive
            dirname = drive + OS_SEP

        Base.__init__(self, name, self, fs)

        # Now set our paths to what we really want them to be.  The
        # name should already contain any necessary separators, such
        # as the initial drive letter (the name) plus the directory
        # separator, except for the "lookup abspath," which does not
        # have the drive letter.
        self.abspath = dirname
        self.labspath = ''
        self.path = dirname
        self.tpath = dirname
        self._morph()

        # Must be reset after Dir._morph() is invoked...
        self.dirname = dirname

        # Cache of every Node under this root, keyed by normalized
        # absolute path; see _lookup_abs().
        self._lookupDict = {}

        self._lookupDict[''] = self
        self._lookupDict['/'] = self

        # The // entry is necessary because os.path.normpath()
        # preserves double slashes at the beginning of a path on Posix
        # platforms.
        if not has_unc:
            self._lookupDict['//'] = self

    def must_be_same(self, klass):
        # A RootDir is always acceptable where a Dir is expected.
        if klass is Dir:
            return
        Base.must_be_same(self, klass)

    def _lookup_abs(self, p, klass, create=1):
        """
        Fast (?) lookup of a *normalized* absolute path.

        This method is intended for use by internal lookups with
        already-normalized path data.  For general-purpose lookups,
        use the FS.Entry(), FS.Dir() or FS.File() methods.

        The caller is responsible for making sure we're passed a
        normalized absolute path; we merely let Python's dictionary look
        up and return the One True Node.FS object for the path.

        If a Node for the specified "p" doesn't already exist, and
        "create" is specified, the Node may be created after recursive
        invocation to find or create the parent directory or directories.
        """
        k = _my_normcase(p)
        try:
            result = self._lookupDict[k]
        except KeyError:
            if not create:
                msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self))
                raise SCons.Errors.UserError(msg)
            # There is no Node for this path name, and we're allowed
            # to create it.
            # (note: would like to use p.rsplit('/',1) here but
            # that's not in python 2.3)
            # e.g.: dir_name, file_name = p.rsplit('/',1)
            # NOTE(review): str.rindex() raises ValueError when no '/'
            # is present (it never returns -1), so the else branch
            # below is unreachable; rfind() may have been intended --
            # confirm before changing.
            last_slash = p.rindex('/')
            if (last_slash >= 0):
                dir_name  = p[:last_slash]
                file_name = p[last_slash+1:]
            else:
                dir_name  = p         # shouldn't happen, just in case
                file_name = ''

            dir_node = self._lookup_abs(dir_name, Dir)
            result = klass(file_name, dir_node, self.fs)

            # Double-check on disk (as configured) that the Node we
            # created matches whatever is out there in the real world.
            result.diskcheck_match()

            self._lookupDict[k] = result
            dir_node.entries[_my_normcase(file_name)] = result
            dir_node.implicit = None
        else:
            # There is already a Node for this path name.  Allow it to
            # complain if we were looking for an inappropriate type.
            result.must_be_same(klass)
        return result

    def __str__(self):
        return self.abspath

    # Entry-name helpers: the root's stored paths already end in the
    # separator, so child paths are built by plain concatenation.
    def entry_abspath(self, name):
        return self.abspath + name

    def entry_labspath(self, name):
        return '/' + name

    def entry_path(self, name):
        return self.path + name

    def entry_tpath(self, name):
        return self.tpath + name

    def is_under(self, dir):
        # The root is only "under" itself.
        if self is dir:
            return 1
        else:
            return 0

    def up(self):
        # The root has no parent directory.
        return None

    def get_dir(self):
        return None

    def src_builder(self):
        return _null
class FileNodeInfo(SCons.Node.NodeInfoBase):
    """Signature ("ninfo") state stored for a File node in .sconsign."""
    current_version_id = 1

    # Fields serialized for each File entry.
    field_list = ['csig', 'timestamp', 'size']

    # This should get reset by the FS initialization.
    fs = None

    def str_to_node(self, s):
        # Convert a stored path string (relative to the top-level
        # SConstruct directory, or absolute) back into a Node.
        top = self.fs.Top
        root = top.root
        if do_splitdrive:
            drive, s = _my_splitdrive(s)
            if drive:
                root = self.fs.get_root(drive)
        if not os.path.isabs(s):
            s = top.labspath + '/' + s
        return root._lookup_abs(s, Entry)
2274
class FileBuildInfo(SCons.Node.BuildInfoBase):
    """Build-dependency ("binfo") state stored for a File node."""
    current_version_id = 1

    def convert_to_sconsign(self):
        """
        Converts this FileBuildInfo object for writing to a .sconsign file

        This replaces each Node in our various dependency lists with its
        usual string representation: relative to the top-level SConstruct
        directory, or an absolute path if it's outside.
        """
        if os_sep_is_slash:
            node_to_str = str
        else:
            # On OSes with a non-slash separator, normalize stored
            # paths to use '/' so .sconsign files are portable.
            def node_to_str(n):
                try:
                    s = n.path
                except AttributeError:
                    s = str(n)
                else:
                    s = s.replace(OS_SEP, '/')
                return s
        for attr in ['bsources', 'bdepends', 'bimplicit']:
            try:
                val = getattr(self, attr)
            except AttributeError:
                pass
            else:
                setattr(self, attr, list(map(node_to_str, val)))
    def convert_from_sconsign(self, dir, name):
        """
        Converts a newly-read FileBuildInfo object for in-SCons use

        For normal up-to-date checking, we don't have any conversion to
        perform--but we're leaving this method here to make that clear.
        """
        pass
    def prepare_dependencies(self):
        """
        Prepares a FileBuildInfo object for explaining what changed

        The bsources, bdepends and bimplicit lists have all been
        stored on disk as paths relative to the top-level SConstruct
        directory.  Convert the strings to actual Nodes (for use by the
        --debug=explain code and --implicit-cache).
        """
        attrs = [
            ('bsources', 'bsourcesigs'),
            ('bdepends', 'bdependsigs'),
            ('bimplicit', 'bimplicitsigs'),
        ]
        for (nattr, sattr) in attrs:
            try:
                strings = getattr(self, nattr)
                nodeinfos = getattr(self, sattr)
            except AttributeError:
                continue
            nodes = []
            for s, ni in zip(strings, nodeinfos):
                if not isinstance(s, SCons.Node.Node):
                    s = ni.str_to_node(s)
                nodes.append(s)
            setattr(self, nattr, nodes)
    def format(self, names=0):
        # Human-readable dump of this entry, one "kid: sigs" line per
        # dependency, plus the action signature line at the end.
        result = []
        bkids = self.bsources + self.bdepends + self.bimplicit
        bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
        for bkid, bkidsig in zip(bkids, bkidsigs):
            result.append(str(bkid) + ': ' +
                          ' '.join(bkidsig.format(names=names)))
        result.append('%s [%s]' % (self.bactsig, self.bact))
        return '\n'.join(result)
2347
class File(Base):
    """A class for files in a file system.
    """

    memoizer_counters = []

    # Per-class info/binfo factories used by the Node base machinery.
    NodeInfo = FileNodeInfo
    BuildInfo = FileBuildInfo

    # NOTE(review): get_content_hash() uses this as KB (multiplies by
    # 1024) while get_csig() compares get_size() against it directly;
    # the units look inconsistent -- confirm intended.
    md5_chunksize = 64
    def diskcheck_match(self):
        # Complain (per the configured diskcheck setting) if a
        # *directory* exists on disk where this File node is expected.
        diskcheck_match(self, self.isdir,
                        "Directory %s found where file expected.")
2362
    def __init__(self, name, directory, fs):
        """Initialize a File node under 'directory' in file system 'fs'."""
        if __debug__: logInstanceCreation(self, 'Node.FS.File')
        Base.__init__(self, name, directory, fs)
        self._morph()
2367
2368 - def Entry(self, name):
2369 """Create an entry node named 'name' relative to 2370 the directory of this file.""" 2371 return self.dir.Entry(name)
2372
2373 - def Dir(self, name, create=True):
2374 """Create a directory node named 'name' relative to 2375 the directory of this file.""" 2376 return self.dir.Dir(name, create=create)
2377
2378 - def Dirs(self, pathlist):
2379 """Create a list of directories relative to the SConscript 2380 directory of this file.""" 2381 return [self.Dir(p) for p in pathlist]
2382
2383 - def File(self, name):
2384 """Create a file node named 'name' relative to 2385 the directory of this file.""" 2386 return self.dir.File(name)
2387 2388 #def generate_build_dict(self): 2389 # """Return an appropriate dictionary of values for building 2390 # this File.""" 2391 # return {'Dir' : self.Dir, 2392 # 'File' : self.File, 2393 # 'RDirs' : self.RDirs} 2394
    def _morph(self):
        """Turn a file system node into a File object."""
        self.scanner_paths = {}
        if not hasattr(self, '_local'):
            self._local = 0

        # If there was already a Builder set on this entry, then
        # we need to make sure we call the target-decider function,
        # not the source-decider.  Reaching in and doing this by hand
        # is a little bogus.  We'd prefer to handle this by adding
        # an Entry.builder_set() method that disambiguates like the
        # other methods, but that starts running into problems with the
        # fragile way we initialize Dir Nodes with their Mkdir builders,
        # yet still allow them to be overridden by the user.  Since it's
        # not clear right now how to fix that, stick with what works
        # until it becomes clear...
        if self.has_builder():
            self.changed_since_last_build = self.decide_target
2413
2414 - def scanner_key(self):
2415 return self.get_suffix()
2416
2417 - def get_contents(self):
2418 if not self.rexists(): 2419 return '' 2420 fname = self.rfile().abspath 2421 try: 2422 contents = open(fname, "rb").read() 2423 except EnvironmentError, e: 2424 if not e.filename: 2425 e.filename = fname 2426 raise 2427 return contents
2428 2429 # This attempts to figure out what the encoding of the text is 2430 # based upon the BOM bytes, and then decodes the contents so that 2431 # it's a valid python string.
2432 - def get_text_contents(self):
2433 contents = self.get_contents() 2434 # The behavior of various decode() methods and functions 2435 # w.r.t. the initial BOM bytes is different for different 2436 # encodings and/or Python versions. ('utf-8' does not strip 2437 # them, but has a 'utf-8-sig' which does; 'utf-16' seems to 2438 # strip them; etc.) Just sidestep all the complication by 2439 # explicitly stripping the BOM before we decode(). 2440 if contents.startswith(codecs.BOM_UTF8): 2441 return contents[len(codecs.BOM_UTF8):].decode('utf-8') 2442 if contents.startswith(codecs.BOM_UTF16_LE): 2443 return contents[len(codecs.BOM_UTF16_LE):].decode('utf-16-le') 2444 if contents.startswith(codecs.BOM_UTF16_BE): 2445 return contents[len(codecs.BOM_UTF16_BE):].decode('utf-16-be') 2446 return contents
2447
    def get_content_hash(self):
        """
        Compute and return the MD5 hash for this file.

        Hashes the repository copy if the file only exists there;
        returns the hash of the empty string if it exists nowhere.
        """
        if not self.rexists():
            return SCons.Util.MD5signature('')
        fname = self.rfile().abspath
        try:
            cs = SCons.Util.MD5filesignature(fname,
                chunksize=SCons.Node.FS.File.md5_chunksize*1024)
        except EnvironmentError, e:
            # Fill in the file name so the caller's traceback is useful.
            if not e.filename:
                e.filename = fname
            raise
        return cs
2463 2464 2465 memoizer_counters.append(SCons.Memoize.CountValue('get_size')) 2466
    def get_size(self):
        # Memoized size of the (repository) file; 0 if it exists nowhere.
        try:
            return self._memo['get_size']
        except KeyError:
            pass

        if self.rexists():
            size = self.rfile().getsize()
        else:
            size = 0

        self._memo['get_size'] = size

        return size
2481 2482 memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp')) 2483
    def get_timestamp(self):
        # Memoized mtime of the (repository) file; 0 if it exists nowhere.
        try:
            return self._memo['get_timestamp']
        except KeyError:
            pass

        if self.rexists():
            timestamp = self.rfile().getmtime()
        else:
            timestamp = 0

        self._memo['get_timestamp'] = timestamp

        return timestamp
2498
    def store_info(self):
        # Merge our build information into the already-stored entry.
        # This accommodates "chained builds" where a file that's a target
        # in one build (SConstruct file) is a source in a different build.
        # See test/chained-build.py for the use case.
        if do_store_info:
            self.dir.sconsign().store_info(self.name, self)

    # Attributes copied verbatim from a pre-refactoring .sconsign entry.
    convert_copy_attrs = [
        'bsources',
        'bimplicit',
        'bdepends',
        'bact',
        'bactsig',
        'ninfo',
    ]

    # Signature-list attributes that need per-entry conversion.
    convert_sig_attrs = [
        'bsourcesigs',
        'bimplicitsigs',
        'bdependsigs',
    ]

    def convert_old_entry(self, old_entry):
        # Convert a .sconsign entry from before the Big Signature
        # Refactoring, doing what we can to convert its information
        # to the new .sconsign entry format.
        #
        # The old format looked essentially like this:
        #
        #   BuildInfo
        #       .ninfo (NodeInfo)
        #           .bsig
        #           .csig
        #           .timestamp
        #           .size
        #       .bsources
        #       .bsourcesigs ("signature" list)
        #       .bdepends
        #       .bdependsigs ("signature" list)
        #       .bimplicit
        #       .bimplicitsigs ("signature" list)
        #       .bact
        #       .bactsig
        #
        # The new format looks like this:
        #
        #   .ninfo (NodeInfo)
        #       .bsig
        #       .csig
        #       .timestamp
        #       .size
        #   .binfo (BuildInfo)
        #       .bsources
        #       .bsourcesigs (NodeInfo list)
        #           .bsig
        #           .csig
        #           .timestamp
        #           .size
        #       .bdepends
        #       .bdependsigs (NodeInfo list)
        #           .bsig
        #           .csig
        #           .timestamp
        #           .size
        #       .bimplicit
        #       .bimplicitsigs (NodeInfo list)
        #           .bsig
        #           .csig
        #           .timestamp
        #           .size
        #       .bact
        #       .bactsig
        #
        # The basic idea of the new structure is that a NodeInfo always
        # holds all available information about the state of a given Node
        # at a certain point in time.  The various .b*sigs lists can just
        # be a list of pointers to the .ninfo attributes of the different
        # dependent nodes, without any copying of information until it's
        # time to pickle it for writing out to a .sconsign file.
        #
        # The complicating issue is that the *old* format only stored one
        # "signature" per dependency, based on however the *last* build
        # was configured.  We don't know from just looking at it whether
        # it was a build signature, a content signature, or a timestamp
        # "signature".  Since we no longer use build signatures, the
        # best we can do is look at the length and if it's thirty two,
        # assume that it was (or might have been) a content signature.
        # If it was actually a build signature, then it will cause a
        # rebuild anyway when it doesn't match the new content signature,
        # but that's probably the best we can do.
        import SCons.SConsign
        new_entry = SCons.SConsign.SConsignEntry()
        new_entry.binfo = self.new_binfo()
        binfo = new_entry.binfo
        for attr in self.convert_copy_attrs:
            try:
                value = getattr(old_entry, attr)
            except AttributeError:
                continue
            setattr(binfo, attr, value)
            delattr(old_entry, attr)
        for attr in self.convert_sig_attrs:
            try:
                sig_list = getattr(old_entry, attr)
            except AttributeError:
                continue
            value = []
            for sig in sig_list:
                ninfo = self.new_ninfo()
                if len(sig) == 32:
                    ninfo.csig = sig
                else:
                    ninfo.timestamp = sig
                value.append(ninfo)
            setattr(binfo, attr, value)
            delattr(old_entry, attr)
        return new_entry
2618 2619 memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info')) 2620
    def get_stored_info(self):
        # Memoized fetch of this file's .sconsign entry; synthesizes an
        # empty entry if none is stored (or the sconsign is unreadable).
        try:
            return self._memo['get_stored_info']
        except KeyError:
            pass

        try:
            sconsign_entry = self.dir.sconsign().get_entry(self.name)
        except (KeyError, EnvironmentError):
            import SCons.SConsign
            sconsign_entry = SCons.SConsign.SConsignEntry()
            sconsign_entry.binfo = self.new_binfo()
            sconsign_entry.ninfo = self.new_ninfo()
        else:
            if isinstance(sconsign_entry, FileBuildInfo):
                # This is a .sconsign file from before the Big Signature
                # Refactoring; convert it as best we can.
                sconsign_entry = self.convert_old_entry(sconsign_entry)
            try:
                delattr(sconsign_entry.ninfo, 'bsig')
            except AttributeError:
                pass

        self._memo['get_stored_info'] = sconsign_entry

        return sconsign_entry
2647
2648 - def get_stored_implicit(self):
2649 binfo = self.get_stored_info().binfo 2650 binfo.prepare_dependencies() 2651 try: return binfo.bimplicit 2652 except AttributeError: return None
2653
2654 - def rel_path(self, other):
2655 return self.dir.rel_path(other)
2656
2657 - def _get_found_includes_key(self, env, scanner, path):
2658 return (id(env), id(scanner), path)
2659 2660 memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key)) 2661
    def get_found_includes(self, env, scanner, path):
        """Return the included implicit dependencies in this file.
        Cache results so we only scan the file once per path
        regardless of how many times this information is requested.
        """
        memo_key = (id(env), id(scanner), path)
        try:
            memo_dict = self._memo['get_found_includes']
        except KeyError:
            memo_dict = {}
            self._memo['get_found_includes'] = memo_dict
        else:
            try:
                return memo_dict[memo_key]
            except KeyError:
                pass

        if scanner:
            # result = [n.disambiguate() for n in scanner(self, env, path)]
            result = scanner(self, env, path)
            result = [N.disambiguate() for N in result]
        else:
            result = []

        memo_dict[memo_key] = result

        return result
2689
    def _createDir(self):
        # ensure that the directories for this node are
        # created.
        self.dir._create()
2694
    def push_to_cache(self):
        """Try to push the node into a cache
        """
        # This should get called before the Nodes' .built() method is
        # called, which would clear the build signature if the file has
        # a source scanner.
        #
        # We have to clear the local memoized values *before* we push
        # the node to cache so that the memoization of the self.exists()
        # return value doesn't interfere.
        if self.nocache:
            return
        self.clear_memoized_values()
        if self.exists():
            self.get_build_env().get_CacheDir().push(self)
2710
2711 - def retrieve_from_cache(self):
2712 """Try to retrieve the node's content from a cache 2713 2714 This method is called from multiple threads in a parallel build, 2715 so only do thread safe stuff here. Do thread unsafe stuff in 2716 built(). 2717 2718 Returns true iff the node was successfully retrieved. 2719 """ 2720 if self.nocache: 2721 return None 2722 if not self.is_derived(): 2723 return None 2724 return self.get_build_env().get_CacheDir().retrieve(self)
2725
    def visited(self):
        # Called when the Taskmaster visits this node; refresh the
        # stored NodeInfo and (as configured) push to the CacheDir.
        if self.exists():
            self.get_build_env().get_CacheDir().push_if_forced(self)

        ninfo = self.get_ninfo()

        csig = self.get_max_drift_csig()
        if csig:
            ninfo.csig = csig

        ninfo.timestamp = self.get_timestamp()
        ninfo.size = self.get_size()

        if not self.has_builder():
            # This is a source file, but it might have been a target file
            # in another build that included more of the DAG.  Copy
            # any build information that's stored in the .sconsign file
            # into our binfo object so it doesn't get lost.
            old = self.get_stored_info()
            self.get_binfo().__dict__.update(old.binfo.__dict__)

        self.store_info()
2748
    def find_src_builder(self):
        # Find a transparent source-code builder (SCCS/RCS checkout)
        # for this file, or None; also attaches it as our builder if
        # we don't already have one.
        if self.rexists():
            return None
        scb = self.dir.src_builder()
        if scb is _null:
            if diskcheck_sccs(self.dir, self.name):
                scb = get_DefaultSCCSBuilder()
            elif diskcheck_rcs(self.dir, self.name):
                scb = get_DefaultRCSBuilder()
            else:
                scb = None
        if scb is not None:
            try:
                b = self.builder
            except AttributeError:
                b = None
            if b is None:
                self.builder_set(scb)
        return scb
2768
    def has_src_builder(self):
        """Return whether this Node has a source builder or not.

        If this Node doesn't have an explicit source code builder, this
        is where we figure out, on the fly, if there's a transparent
        source code builder for it.

        Note that if we found a source builder, we also set the
        self.builder attribute, so that all of the methods that actually
        *build* this file don't have to do anything different.
        """
        # The result is cached in self.sbuilder on first use.
        try:
            scb = self.sbuilder
        except AttributeError:
            scb = self.sbuilder = self.find_src_builder()
        return scb is not None
2785
2786 - def alter_targets(self):
2787 """Return any corresponding targets in a variant directory. 2788 """ 2789 if self.is_derived(): 2790 return [], None 2791 return self.fs.variant_dir_target_climb(self, self.dir, [self.name])
2792
    def _rmv_existing(self):
        # Remove an existing (non-precious) target file before rebuilding.
        self.clear_memoized_values()
        if print_duplicate:
            print "dup: removing existing target %s"%self
        e = Unlink(self, [], None)
        if isinstance(e, SCons.Errors.BuildError):
            raise e
2800 2801 # 2802 # Taskmaster interface subsystem 2803 # 2804
    def make_ready(self):
        # Taskmaster hook: resolve any transparent source builder and
        # compute our build info before the build decision is made.
        self.has_src_builder()
        self.get_binfo()
2808
    def prepare(self):
        """Prepare for this file to be created."""
        SCons.Node.Node.prepare(self)

        if self.get_state() != SCons.Node.up_to_date:
            if self.exists():
                # Out-of-date derived (and non-precious) targets are
                # removed before being rebuilt.
                if self.is_derived() and not self.precious:
                    self._rmv_existing()
            else:
                try:
                    self._createDir()
                except SCons.Errors.StopError, drive:
                    desc = "No drive `%s' for target `%s'." % (drive, self)
                    raise SCons.Errors.StopError(desc)
2823 2824 # 2825 # 2826 # 2827
2828 - def remove(self):
2829 """Remove this file.""" 2830 if self.exists() or self.islink(): 2831 self.fs.unlink(self.path) 2832 return 1 2833 return None
2834
    def do_duplicate(self, src):
        # Copy/link 'src' (the source-tree file) into this variant-dir
        # location, replacing any stale copy first.
        self._createDir()
        if print_duplicate:
            print "dup: relinking variant '%s' from '%s'"%(self, src)
        Unlink(self, None, None)
        e = Link(self, src, None)
        if isinstance(e, SCons.Errors.BuildError):
            desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
            raise SCons.Errors.StopError(desc)
        self.linked = 1
        # The Link() action may or may not have actually
        # created the file, depending on whether the -n
        # option was used or not.  Delete the _exists and
        # _rexists attributes so they can be reevaluated.
        self.clear()
2850 2851 memoizer_counters.append(SCons.Memoize.CountValue('exists')) 2852
    def exists(self):
        # Memoized existence check; as a side effect this performs the
        # variant-dir duplication (or cleanup) the first time it's asked.
        try:
            return self._memo['exists']
        except KeyError:
            pass
        # Duplicate from source path if we are set up to do this.
        if self.duplicate and not self.is_derived() and not self.linked:
            src = self.srcnode()
            if src is not self:
                # At this point, src is meant to be copied in a variant directory.
                src = src.rfile()
                if src.abspath != self.abspath:
                    if src.exists():
                        self.do_duplicate(src)
                        # Can't return 1 here because the duplication might
                        # not actually occur if the -n option is being used.
                    else:
                        # The source file does not exist.  Make sure no old
                        # copy remains in the variant directory.
                        if print_duplicate:
                            print "dup: no src for %s, unlinking old variant copy"%self
                        if Base.exists(self) or self.islink():
                            self.fs.unlink(self.path)
                        # Return None explicitly because the Base.exists() call
                        # above will have cached its value if the file existed.
                        self._memo['exists'] = None
                        return None
        result = Base.exists(self)
        self._memo['exists'] = result
        return result
2883 2884 # 2885 # SIGNATURE SUBSYSTEM 2886 # 2887
    def get_max_drift_csig(self):
        """
        Returns the content signature currently stored for this node
        if it's been unmodified longer than the max_drift value, or the
        max_drift value is 0.  Returns None otherwise.
        """
        old = self.get_stored_info()
        mtime = self.get_timestamp()

        max_drift = self.fs.max_drift
        if max_drift > 0:
            if (time.time() - mtime) > max_drift:
                # File is old enough that its stored csig is trustworthy,
                # provided the stored timestamp still matches.
                try:
                    n = old.ninfo
                    if n.timestamp and n.csig and n.timestamp == mtime:
                        return n.csig
                except AttributeError:
                    pass
        elif max_drift == 0:
            # max_drift 0 means "always trust the stored csig."
            try:
                return old.ninfo.csig
            except AttributeError:
                pass

        return None
2913
    def get_csig(self):
        """
        Generate this node's content signature, the digested signature
        of its content, caching it on the node's NodeInfo.

        returns - the content signature (an MD5 hex digest string, or
        '' if the on-disk entry turned out to be a directory)
        """
        ninfo = self.get_ninfo()
        try:
            return ninfo.csig
        except AttributeError:
            pass

        csig = self.get_max_drift_csig()
        if csig is None:

            try:
                # NOTE(review): get_size() is compared against
                # md5_chunksize directly here, while get_content_hash()
                # treats md5_chunksize as KB -- confirm intended units.
                if self.get_size() < SCons.Node.FS.File.md5_chunksize:
                    contents = self.get_contents()
                else:
                    csig = self.get_content_hash()
            except IOError:
                # This can happen if there's actually a directory on-disk,
                # which can be the case if they've disabled disk checks,
                # or if an action with a File target actually happens to
                # create a same-named directory by mistake.
                csig = ''
            else:
                if not csig:
                    csig = SCons.Util.MD5signature(contents)

        ninfo.csig = csig

        return csig
2950 2951 # 2952 # DECISION SUBSYSTEM 2953 # 2954
    def builder_set(self, builder):
        # Attaching a builder makes this a target file, so switch the
        # decider from decide_source to decide_target.
        SCons.Node.Node.builder_set(self, builder)
        self.changed_since_last_build = self.decide_target
2958
2959 - def changed_content(self, target, prev_ni):
2960 cur_csig = self.get_csig() 2961 try: 2962 return cur_csig != prev_ni.csig 2963 except AttributeError: 2964 return 1
2965
    def changed_state(self, target, prev_ni):
        # "Changed" simply means this node is not already up-to-date.
        return self.state != SCons.Node.up_to_date
2968
    def changed_timestamp_then_content(self, target, prev_ni):
        # Cheap timestamp check first; only fall back to the content
        # comparison when the timestamp actually differs.
        if not self.changed_timestamp_match(target, prev_ni):
            try:
                # Timestamp matched: reuse the stored csig so we don't
                # re-hash the file.
                self.get_ninfo().csig = prev_ni.csig
            except AttributeError:
                pass
            return False
        return self.changed_content(target, prev_ni)
2977
    def changed_timestamp_newer(self, target, prev_ni):
        # Make-style decision: changed if this source is newer than the
        # target; a missing target timestamp counts as changed.
        try:
            return self.get_timestamp() > target.get_timestamp()
        except AttributeError:
            return 1
2983
    def changed_timestamp_match(self, target, prev_ni):
        # Changed if the current timestamp differs from the stored one;
        # a missing stored timestamp counts as changed.
        try:
            return self.get_timestamp() != prev_ni.timestamp
        except AttributeError:
            return 1
2989
2990 - def decide_source(self, target, prev_ni):
2991 return target.get_build_env().decide_source(self, target, prev_ni)
2992
    def decide_target(self, target, prev_ni):
        # Delegate the "changed" decision for this node acting as a
        # *target* to the target's construction environment.
        return target.get_build_env().decide_target(self, target, prev_ni)

    # Initialize this Node's decider function to decide_source() because
    # every file is a source file until it has a Builder attached...
    changed_since_last_build = decide_source
    def is_up_to_date(self):
        # T is a local trace switch; flip to 1 for debug output.
        T = 0
        if T: Trace('is_up_to_date(%s):' % self)
        if not self.exists():
            if T: Trace(' not self.exists():')
            # The file doesn't exist locally...
            r = self.rfile()
            if r != self:
                # ...but there is one in a Repository...
                if not self.changed(r):
                    if T: Trace(' changed(%s):' % r)
                    # ...and it's even up-to-date...
                    if self._local:
                        # ...and they'd like a local copy.
                        e = LocalCopy(self, r, None)
                        if isinstance(e, SCons.Errors.BuildError):
                            # NOTE(review): bare 'raise' outside an
                            # except block re-raises the last active
                            # exception; 'raise e' may have been
                            # intended -- confirm.
                            raise
                        self.store_info()
                    if T: Trace(' 1\n')
                    return 1
            self.changed()
            if T: Trace(' None\n')
            return None
        else:
            r = self.changed()
            if T: Trace(' self.exists():  %s\n' % r)
            return not r
3027 3028 memoizer_counters.append(SCons.Memoize.CountValue('rfile')) 3029
    def rfile(self):
        # Memoized lookup of this file's Repository counterpart; returns
        # self if the file exists locally (or no repository copy is found).
        try:
            return self._memo['rfile']
        except KeyError:
            pass
        result = self
        if not self.exists():
            norm_name = _my_normcase(self.name)
            for dir in self.dir.get_all_rdirs():
                try: node = dir.entries[norm_name]
                except KeyError: node = dir.file_on_disk(self.name)
                if node and node.exists() and \
                   (isinstance(node, File) or isinstance(node, Entry) \
                    or not node.is_derived()):
                    result = node
                    # Copy over our local attributes to the repository
                    # Node so we identify shared object files in the
                    # repository and don't assume they're static.
                    #
                    # This isn't perfect; the attribute would ideally
                    # be attached to the object in the repository in
                    # case it was built statically in the repository
                    # and we changed it to shared locally, but that's
                    # rarely the case and would only occur if you
                    # intentionally used the same suffix for both
                    # shared and static objects anyway.  So this
                    # should work well in practice.
                    result.attributes = self.attributes
                    break
        self._memo['rfile'] = result
        return result
3061
3062 - def rstr(self):
3063 return str(self.rfile())
3064
3065 - def get_cachedir_csig(self):
3066 """ 3067 Fetch a Node's content signature for purposes of computing 3068