
Source Code for Module SCons.Node.FS

   1  """scons.Node.FS 
   2   
   3  File system nodes. 
   4   
   5  These Nodes represent the canonical external objects that people think 
   6  of when they think of building software: files and directories. 
   7   
   8  This holds a "default_fs" variable that should be initialized with an FS 
   9  that can be used by scripts or modules looking for the canonical default. 
  10   
  11  """ 
  12   
  13  # 
  14  # Copyright (c) 2001 - 2015 The SCons Foundation 
  15  # 
  16  # Permission is hereby granted, free of charge, to any person obtaining 
  17  # a copy of this software and associated documentation files (the 
  18  # "Software"), to deal in the Software without restriction, including 
  19  # without limitation the rights to use, copy, modify, merge, publish, 
  20  # distribute, sublicense, and/or sell copies of the Software, and to 
  21  # permit persons to whom the Software is furnished to do so, subject to 
  22  # the following conditions: 
  23  # 
  24  # The above copyright notice and this permission notice shall be included 
  25  # in all copies or substantial portions of the Software. 
  26  # 
  27  # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY 
  28  # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 
  29  # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
  30  # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 
  31  # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 
  32  # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 
  33  # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
  34   
  35  __revision__ = "src/engine/SCons/Node/FS.py rel_2.3.5:3347:d31d5a4e74b6 2015/07/31 14:36:10 bdbaddog" 
  36   
  37  import fnmatch 
  38  import os 
  39  import re 
  40  import shutil 
  41  import stat 
  42  import sys 
  43  import time 
  44  import codecs 
  45   
  46  import SCons.Action 
  47  import SCons.Debug 
  48  from SCons.Debug import logInstanceCreation 
  49  import SCons.Errors 
  50  import SCons.Memoize 
  51  import SCons.Node 
  52  import SCons.Node.Alias 
  53  import SCons.Subst 
  54  import SCons.Util 
  55  import SCons.Warnings 
  56   
  57  from SCons.Debug import Trace 
  58   
  59  do_store_info = True 
  60  print_duplicate = 0 
  61   
  62   
  63  class EntryProxyAttributeError(AttributeError):
  64      """
  65      An AttributeError subclass for recording and displaying the name
  66      of the underlying Entry involved in an AttributeError exception.
  67      """
  68      def __init__(self, entry_proxy, attribute):
  69          AttributeError.__init__(self)
  70          self.entry_proxy = entry_proxy
  71          self.attribute = attribute
  72      def __str__(self):
  73          entry = self.entry_proxy.get()
  74          fmt = "%s instance %s has no attribute %s"
  75          return fmt % (entry.__class__.__name__,
  76                        repr(entry.name),
  77                        repr(self.attribute))
  78
  79  # The max_drift value: by default, use a cached signature value for
  80  # any file that's been untouched for more than two days.
  81  default_max_drift = 2*24*60*60
  82
  83  #
  84  # We stringify these file system Nodes a lot.  Turning a file system Node
  85  # into a string is non-trivial, because the final string representation
  86  # can depend on a lot of factors:  whether it's a derived target or not,
  87  # whether it's linked to a repository or source directory, and whether
  88  # there's duplication going on.  The normal technique for optimizing
  89  # calculations like this is to memoize (cache) the string value, so you
  90  # only have to do the calculation once.
  91  #
  92  # A number of the above factors, however, can be set after we've already
  93  # been asked to return a string for a Node, because a Repository() or
  94  # VariantDir() call or the like may not occur until later in SConscript
  95  # files.  So this variable controls whether we bother trying to save
  96  # string values for Nodes.  The wrapper interface can set this whenever
  97  # it's done mucking with Repository and VariantDir and the other stuff,
  98  # to let this module know it can start returning saved string values
  99  # for Nodes.
 100  #
 101  Save_Strings = None
 102
 103  def save_strings(val):
 104      global Save_Strings
 105      Save_Strings = val
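As an editorial aside, the deferred memoization described in the comment above can be sketched in isolation; this toy class (not part of the module, all names made up) only caches its string form once a global flag says the value can no longer change:

    # Hypothetical, self-contained sketch of the Save_Strings idea.
    _cache_enabled = False              # plays the role of Save_Strings

    def enable_cache(val):              # plays the role of save_strings()
        global _cache_enabled
        _cache_enabled = val

    class PathLike(object):
        def __init__(self, parts):
            self.parts = parts
            self._memo = {}
        def _compute(self):
            # Potentially expensive, and the answer may still change while
            # the build configuration is being read.
            return '/'.join(self.parts)
        def __str__(self):
            if not _cache_enabled:
                return self._compute()
            if '_str' not in self._memo:
                self._memo['_str'] = self._compute()
            return self._memo['_str']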
 106
 107  #
 108  # Avoid unnecessary function calls by recording a Boolean value that
 109  # tells us whether or not os.path.splitdrive() actually does anything
 110  # on this system, and therefore whether we need to bother calling it
 111  # when looking up path names in various methods below.
 112  #
 113
 114  do_splitdrive = None
 115  _my_splitdrive = None
 116
 117  def initialize_do_splitdrive():
 118      global do_splitdrive
 119      global has_unc
 120      drive, path = os.path.splitdrive('X:/foo')
 121      has_unc = hasattr(os.path, 'splitunc')
 122
 123      do_splitdrive = not not drive or has_unc
 124
 125      global _my_splitdrive
 126      if has_unc:
 127          def splitdrive(p):
 128              if p[1:2] == ':':
 129                  return p[:2], p[2:]
 130              if p[0:2] == '//':
 131                  # Note that we leave a leading slash in the path
 132                  # because UNC paths are always absolute.
 133                  return '//', p[1:]
 134              return '', p
 135      else:
 136          def splitdrive(p):
 137              if p[1:2] == ':':
 138                  return p[:2], p[2:]
 139              return '', p
 140      _my_splitdrive = splitdrive
 141
 142      # Keep some commonly used values in global variables to skip
 143      # the module look-up costs.
 144      global OS_SEP
 145      global UNC_PREFIX
 146      global os_sep_is_slash
 147
 148      OS_SEP = os.sep
 149      UNC_PREFIX = OS_SEP + OS_SEP
 150      os_sep_is_slash = OS_SEP == '/'
 151
 152  initialize_do_splitdrive()
 153
 154  # Used to avoid invoking os.path.normpath if not necessary.
 155  needs_normpath_check = re.compile(
 156      r'''
 157        # We need to renormalize the path if it contains any consecutive
 158        # '/' characters.
 159        .*// |
 160
 161        # We need to renormalize the path if it contains a '..' directory.
 162        # Note that we check for all the following cases:
 163        #
 164        #   a) The path is a single '..'
 165        #   b) The path starts with '..'. E.g. '../' or '../moredirs'
 166        #      but we do not match '..abc/'.
 167        #   c) The path ends with '..'. E.g. '/..' or 'dirs/..'
 168        #   d) The path contains a '..' in the middle.
 169        #      E.g. dirs/../moredirs
 170
 171        (.*/)?\.\.(?:/|$) |
 172
 173        # We need to renormalize the path if it contains a '.'
 174        # directory, but NOT if it is a single '.'.  We do not want
 175        # to match a single '.' because that case is checked for
 176        # explicitly, since it is a common enough case.
 177        #
 178        # Note that we check for all the following cases:
 179        #
 180        #   a) We don't match a single '.'
 181        #   b) We match if the path starts with '.'. E.g. './' or
 182        #      './moredirs' but we do not match '.abc/'.
 183        #   c) We match if the path ends with '.'. E.g. '/.' or
 184        #      'dirs/.'
 185        #   d) We match if the path contains a '.' in the middle.
 186        #      E.g. dirs/./moredirs
 187
 188        \./|.*/\.(?:/|$)
 189
 190      ''',
 191      re.VERBOSE
 192      )
 193  needs_normpath_match = needs_normpath_check.match
 194
 195  #
 196  # SCons.Action objects for interacting with the outside world.
 197  #
 198  # The Node.FS methods in this module should use these actions to
 199  # create and/or remove files and directories; they should *not* use
 200  # os.{link,symlink,unlink,mkdir}(), etc., directly.
 201  #
 202  # Using these SCons.Action objects ensures that descriptions of these
 203  # external activities are properly displayed, that the displays are
 204  # suppressed when the -s (silent) option is used, and (most importantly)
 205  # the actions are disabled when the -n option is used, in which case
 206  # there should be *no* changes to the external file system(s)...
 207  #
 208
 209  if hasattr(os, 'link'):
 222  else:
 223      _hardlink_func = None
 224
 225  if hasattr(os, 'symlink'):
 228  else:
 229      _softlink_func = None
 230
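As a quick illustration of the pattern above (an editorial aside, not part of the module), needs_normpath_match() returns a match object only for paths that actually need renormalizing:

    # Illustrative only; mirrors the cases documented in the regex comments.
    assert needs_normpath_match('src//lib') is not None     # consecutive '/'
    assert needs_normpath_match('src/../lib') is not None   # '..' component
    assert needs_normpath_match('./lib') is not None        # leading '.' component
    assert needs_normpath_match('src/./lib') is not None    # '.' in the middle
    assert needs_normpath_match('.') is None                # lone '.' is handled separately
    assert needs_normpath_match('src/lib/foo.c') is None    # already normalized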
 231  def _copy_func(fs, src, dest):
 232      shutil.copy2(src, dest)
 233      st = fs.stat(src)
 234      fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
 235
 236
 237  Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy',
 238                      'hard-copy', 'soft-copy', 'copy']
 239
 240  Link_Funcs = [] # contains the callables of the specified duplication style
 241
 242  def set_duplicate(duplicate):
 243      # Fill in the Link_Funcs list according to the argument
 244      # (discarding those not available on the platform).
 245
 246      # Set up the dictionary that maps the argument names to the
 247      # underlying implementations.  We do this inside this function,
 248      # not in the top-level module code, so that we can remap os.link
 249      # and os.symlink for testing purposes.
 250      link_dict = {
 251          'hard' : _hardlink_func,
 252          'soft' : _softlink_func,
 253          'copy' : _copy_func
 254      }
 255
 256      if not duplicate in Valid_Duplicates:
 257          raise SCons.Errors.InternalError("The argument of set_duplicate "
 258                                           "should be in Valid_Duplicates")
 259      global Link_Funcs
 260      Link_Funcs = []
 261      for func in duplicate.split('-'):
 262          if link_dict[func]:
 263              Link_Funcs.append(link_dict[func])
 264
 265  def LinkFunc(target, source, env):
 266      # Relative paths cause problems with symbolic links, so
 267      # we use absolute paths, which may be a problem for people
 268      # who want to move their soft-linked src-trees around. Those
 269      # people should use the 'hard-copy' mode, softlinks cannot be
 270      # used for that; at least I have no idea how ...
 271      src = source[0].abspath
 272      dest = target[0].abspath
 273      dir, file = os.path.split(dest)
 274      if dir and not target[0].fs.isdir(dir):
 275          os.makedirs(dir)
 276      if not Link_Funcs:
 277          # Set a default order of link functions.
 278          set_duplicate('hard-soft-copy')
 279      fs = source[0].fs
 280      # Now link the files with the previously specified order.
 281      for func in Link_Funcs:
 282          try:
 283              func(fs, src, dest)
 284              break
 285          except (IOError, OSError):
 286              # An OSError indicates something happened like a permissions
 287              # problem or an attempt to symlink across file-system
 288              # boundaries.  An IOError indicates something like the file
 289              # not existing.  In either case, keep trying additional
 290              # functions in the list and only raise an error if the last
 291              # one failed.
 292              if func == Link_Funcs[-1]:
 293                  # exceptions from the last link method (copy) are fatal
 294                  raise
 295      return 0
296 297 Link = SCons.Action.Action(LinkFunc, None)
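As an aside, the duplication style passed to set_duplicate() maps directly onto the order in which LinkFunc() tries the link functions; for example (illustrative only):

    # Illustrative only.  On a platform that supports both kinds of links, this
    # makes LinkFunc() try a symbolic link first, then a hard link, and finally
    # fall back to _copy_func(); styles without OS support are silently dropped.
    set_duplicate('soft-hard-copy')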
298 -def LocalString(target, source, env):
299 return 'Local copy of %s from %s' % (target[0], source[0])
300 301 LocalCopy = SCons.Action.Action(LinkFunc, LocalString) 302
303 -def UnlinkFunc(target, source, env):
304 t = target[0] 305 t.fs.unlink(t.abspath) 306 return 0
307 308 Unlink = SCons.Action.Action(UnlinkFunc, None) 309
310 -def MkdirFunc(target, source, env):
311 t = target[0] 312 if not t.exists(): 313 t.fs.mkdir(t.abspath) 314 return 0
315 316 Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None) 317 318 MkdirBuilder = None 319
320 -def get_MkdirBuilder():
321 global MkdirBuilder 322 if MkdirBuilder is None: 323 import SCons.Builder 324 import SCons.Defaults 325 # "env" will get filled in by Executor.get_build_env() 326 # calling SCons.Defaults.DefaultEnvironment() when necessary. 327 MkdirBuilder = SCons.Builder.Builder(action = Mkdir, 328 env = None, 329 explain = None, 330 is_explicit = None, 331 target_scanner = SCons.Defaults.DirEntryScanner, 332 name = "MkdirBuilder") 333 return MkdirBuilder
334
335 -class _Null(object):
336 pass
337 338 _null = _Null() 339 340 DefaultSCCSBuilder = None 341 DefaultRCSBuilder = None 342
343 -def get_DefaultSCCSBuilder():
344 global DefaultSCCSBuilder 345 if DefaultSCCSBuilder is None: 346 import SCons.Builder 347 # "env" will get filled in by Executor.get_build_env() 348 # calling SCons.Defaults.DefaultEnvironment() when necessary. 349 act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR') 350 DefaultSCCSBuilder = SCons.Builder.Builder(action = act, 351 env = None, 352 name = "DefaultSCCSBuilder") 353 return DefaultSCCSBuilder
354
355 -def get_DefaultRCSBuilder():
356 global DefaultRCSBuilder 357 if DefaultRCSBuilder is None: 358 import SCons.Builder 359 # "env" will get filled in by Executor.get_build_env() 360 # calling SCons.Defaults.DefaultEnvironment() when necessary. 361 act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR') 362 DefaultRCSBuilder = SCons.Builder.Builder(action = act, 363 env = None, 364 name = "DefaultRCSBuilder") 365 return DefaultRCSBuilder
366 367 # Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem. 368 _is_cygwin = sys.platform == "cygwin" 369 if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
370 - def _my_normcase(x):
371 return x
372 else:
373 - def _my_normcase(x):
374 return x.upper()
375 376 377
378 -class DiskChecker(object):
379 - def __init__(self, type, do, ignore):
380 self.type = type 381 self.do = do 382 self.ignore = ignore 383 self.func = do
384 - def __call__(self, *args, **kw):
385 return self.func(*args, **kw)
386 - def set(self, list):
387 if self.type in list: 388 self.func = self.do 389 else: 390 self.func = self.ignore
391
392 -def do_diskcheck_match(node, predicate, errorfmt):
393 result = predicate() 394 try: 395 # If calling the predicate() cached a None value from stat(), 396 # remove it so it doesn't interfere with later attempts to 397 # build this Node as we walk the DAG. (This isn't a great way 398 # to do this, we're reaching into an interface that doesn't 399 # really belong to us, but it's all about performance, so 400 # for now we'll just document the dependency...) 401 if node._memo['stat'] is None: 402 del node._memo['stat'] 403 except (AttributeError, KeyError): 404 pass 405 if result: 406 raise TypeError(errorfmt % node.abspath)
407
408 -def ignore_diskcheck_match(node, predicate, errorfmt):
409 pass
410
411 -def do_diskcheck_rcs(node, name):
412 try: 413 rcs_dir = node.rcs_dir 414 except AttributeError: 415 if node.entry_exists_on_disk('RCS'): 416 rcs_dir = node.Dir('RCS') 417 else: 418 rcs_dir = None 419 node.rcs_dir = rcs_dir 420 if rcs_dir: 421 return rcs_dir.entry_exists_on_disk(name+',v') 422 return None
423
424 -def ignore_diskcheck_rcs(node, name):
425 return None
426
427 -def do_diskcheck_sccs(node, name):
428 try: 429 sccs_dir = node.sccs_dir 430 except AttributeError: 431 if node.entry_exists_on_disk('SCCS'): 432 sccs_dir = node.Dir('SCCS') 433 else: 434 sccs_dir = None 435 node.sccs_dir = sccs_dir 436 if sccs_dir: 437 return sccs_dir.entry_exists_on_disk('s.'+name) 438 return None
439
440 -def ignore_diskcheck_sccs(node, name):
441 return None
442 443 diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match) 444 diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs) 445 diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs) 446 447 diskcheckers = [ 448 diskcheck_match, 449 diskcheck_rcs, 450 diskcheck_sccs, 451 ] 452
453 -def set_diskcheck(list):
454 for dc in diskcheckers: 455 dc.set(list)
456
457 -def diskcheck_types():
458 return [dc.type for dc in diskcheckers]
459 460 461
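As an editorial aside, the three DiskChecker instances above are switched as a group; a minimal sketch of how they are configured (roughly what SCons's --diskcheck handling ends up doing) looks like this:

    # Illustrative only.
    print(diskcheck_types())    # -> ['match', 'rcs', 'sccs']

    # Enable only the 'match' check; the rcs and sccs checkers fall back
    # to their ignore_* implementations and become no-ops.
    set_diskcheck(['match'])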
462 -class EntryProxy(SCons.Util.Proxy):
463 464 __str__ = SCons.Util.Delegate('__str__') 465
466 - def __get_abspath(self):
467 entry = self.get() 468 return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(), 469 entry.name + "_abspath")
470
471 - def __get_filebase(self):
472 name = self.get().name 473 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0], 474 name + "_filebase")
475
476 - def __get_suffix(self):
477 name = self.get().name 478 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1], 479 name + "_suffix")
480
481 - def __get_file(self):
482 name = self.get().name 483 return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
484
485 - def __get_base_path(self):
486 """Return the file's directory and file name, with the 487 suffix stripped.""" 488 entry = self.get() 489 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0], 490 entry.name + "_base")
491
492 - def __get_posix_path(self):
493 """Return the path with / as the path separator, 494 regardless of platform.""" 495 if os_sep_is_slash: 496 return self 497 else: 498 entry = self.get() 499 r = entry.get_path().replace(OS_SEP, '/') 500 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
501
502 - def __get_windows_path(self):
503 """Return the path with \ as the path separator, 504 regardless of platform.""" 505 if OS_SEP == '\\': 506 return self 507 else: 508 entry = self.get() 509 r = entry.get_path().replace(OS_SEP, '\\') 510 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")
511
512 - def __get_srcnode(self):
513 return EntryProxy(self.get().srcnode())
514
515 - def __get_srcdir(self):
516 """Returns the directory containing the source node linked to this 517 node via VariantDir(), or the directory of this node if not linked.""" 518 return EntryProxy(self.get().srcnode().dir)
519
520 - def __get_rsrcnode(self):
521 return EntryProxy(self.get().srcnode().rfile())
522
523 - def __get_rsrcdir(self):
524 """Returns the directory containing the source node linked to this 525 node via VariantDir(), or the directory of this node if not linked.""" 526 return EntryProxy(self.get().srcnode().rfile().dir)
527
528 - def __get_dir(self):
529 return EntryProxy(self.get().dir)
530 531 dictSpecialAttrs = { "base" : __get_base_path, 532 "posix" : __get_posix_path, 533 "windows" : __get_windows_path, 534 "win32" : __get_windows_path, 535 "srcpath" : __get_srcnode, 536 "srcdir" : __get_srcdir, 537 "dir" : __get_dir, 538 "abspath" : __get_abspath, 539 "filebase" : __get_filebase, 540 "suffix" : __get_suffix, 541 "file" : __get_file, 542 "rsrcpath" : __get_rsrcnode, 543 "rsrcdir" : __get_rsrcdir, 544 } 545
546 - def __getattr__(self, name):
547 # This is how we implement the "special" attributes 548 # such as base, posix, srcdir, etc. 549 try: 550 attr_function = self.dictSpecialAttrs[name] 551 except KeyError: 552 try: 553 attr = SCons.Util.Proxy.__getattr__(self, name) 554 except AttributeError, e: 555 # Raise our own AttributeError subclass with an 556 # overridden __str__() method that identifies the 557 # name of the entry that caused the exception. 558 raise EntryProxyAttributeError(self, name) 559 return attr 560 else: 561 return attr_function(self)
562
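The dictSpecialAttrs table above is what backs the node attribute expansions available in command strings. As an illustrative aside (not part of this module; the environment, tool name and file names below are made up), a typical use in an SConstruct looks like:

    # Illustrative only; 'env' is assumed to be an SCons construction environment.
    env.Command('out/report.txt', 'src/data.csv',
                'summarize --name ${SOURCE.filebase} '   # expands to 'data'
                '--input ${SOURCE.posix} '               # 'src/data.csv', '/'-separated
                '--output ${TARGET.abspath}')            # absolute path of the target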
563 -class Base(SCons.Node.Node):
564 """A generic class for file system entries. This class is for 565 when we don't know yet whether the entry being looked up is a file 566 or a directory. Instances of this class can morph into either 567 Dir or File objects by a later, more precise lookup. 568 569 Note: this class does not define __cmp__ and __hash__ for 570 efficiency reasons. SCons does a lot of comparing of 571 Node.FS.{Base,Entry,File,Dir} objects, so those operations must be 572 as fast as possible, which means we want to use Python's built-in 573 object identity comparisons. 574 """ 575 576 memoizer_counters = [] 577
578 - def __init__(self, name, directory, fs):
579 """Initialize a generic Node.FS.Base object. 580 581 Call the superclass initialization, take care of setting up 582 our relative and absolute paths, identify our parent 583 directory, and indicate that this node should use 584 signatures.""" 585 if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.Base') 586 SCons.Node.Node.__init__(self) 587 588 # Filenames and paths are probably reused and are intern'ed to 589 # save some memory. 590 591 #: Filename with extension as it was specified when the object was 592 #: created; to obtain filesystem path, use Python str() function 593 self.name = SCons.Util.silent_intern(name) 594 #: Cached filename extension 595 self.suffix = SCons.Util.silent_intern(SCons.Util.splitext(name)[1]) 596 self.fs = fs #: Reference to parent Node.FS object 597 598 assert directory, "A directory must be provided" 599 600 self.abspath = SCons.Util.silent_intern(directory.entry_abspath(name)) 601 self.labspath = SCons.Util.silent_intern(directory.entry_labspath(name)) 602 if directory.path == '.': 603 self.path = SCons.Util.silent_intern(name) 604 else: 605 self.path = SCons.Util.silent_intern(directory.entry_path(name)) 606 if directory.tpath == '.': 607 self.tpath = SCons.Util.silent_intern(name) 608 else: 609 self.tpath = SCons.Util.silent_intern(directory.entry_tpath(name)) 610 self.path_elements = directory.path_elements + [self] 611 612 self.dir = directory 613 self.cwd = None # will hold the SConscript directory for target nodes 614 self.duplicate = directory.duplicate
615
616 - def str_for_display(self):
617 return '"' + self.__str__() + '"'
618
619 - def must_be_same(self, klass):
620 """ 621 This node, which already existed, is being looked up as the 622 specified klass. Raise an exception if it isn't. 623 """ 624 if isinstance(self, klass) or klass is Entry: 625 return 626 raise TypeError("Tried to lookup %s '%s' as a %s." %\ 627 (self.__class__.__name__, self.path, klass.__name__))
628
629 - def get_dir(self):
630 return self.dir
631
632 - def get_suffix(self):
633 return self.suffix
634
635 - def rfile(self):
636 return self
637
638 - def __str__(self):
639 """A Node.FS.Base object's string representation is its path 640 name.""" 641 global Save_Strings 642 if Save_Strings: 643 return self._save_str() 644 return self._get_str()
645 646 memoizer_counters.append(SCons.Memoize.CountValue('_save_str')) 647
648 - def _save_str(self):
649 try: 650 return self._memo['_save_str'] 651 except KeyError: 652 pass 653 result = sys.intern(self._get_str()) 654 self._memo['_save_str'] = result 655 return result
656
657 - def _get_str(self):
658 global Save_Strings 659 if self.duplicate or self.is_derived(): 660 return self.get_path() 661 srcnode = self.srcnode() 662 if srcnode.stat() is None and self.stat() is not None: 663 result = self.get_path() 664 else: 665 result = srcnode.get_path() 666 if not Save_Strings: 667 # We're not at the point where we're saving the string 668 # representations of FS Nodes (because we haven't finished 669 # reading the SConscript files and need to have str() return 670 # things relative to them). That also means we can't yet 671 # cache values returned (or not returned) by stat(), since 672 # Python code in the SConscript files might still create 673 # or otherwise affect the on-disk file. So get rid of the 674 # values that the underlying stat() method saved. 675 try: del self._memo['stat'] 676 except KeyError: pass 677 if self is not srcnode: 678 try: del srcnode._memo['stat'] 679 except KeyError: pass 680 return result
681 682 rstr = __str__ 683 684 memoizer_counters.append(SCons.Memoize.CountValue('stat')) 685
686 - def stat(self):
687 try: return self._memo['stat'] 688 except KeyError: pass 689 try: result = self.fs.stat(self.abspath) 690 except os.error: result = None 691 self._memo['stat'] = result 692 return result
693
694 - def exists(self):
695 return self.stat() is not None
696
697 - def rexists(self):
698 return self.rfile().exists()
699
700 - def getmtime(self):
701 st = self.stat() 702 if st: return st[stat.ST_MTIME] 703 else: return None
704
705 - def getsize(self):
706 st = self.stat() 707 if st: return st[stat.ST_SIZE] 708 else: return None
709
710 - def isdir(self):
711 st = self.stat() 712 return st is not None and stat.S_ISDIR(st[stat.ST_MODE])
713
714 - def isfile(self):
715 st = self.stat() 716 return st is not None and stat.S_ISREG(st[stat.ST_MODE])
717 718 if hasattr(os, 'symlink'): 723 else: 726
727 - def is_under(self, dir):
728 if self is dir: 729 return 1 730 else: 731 return self.dir.is_under(dir)
732
733 - def set_local(self):
734 self._local = 1
735
736 - def srcnode(self):
737 """If this node is in a build path, return the node 738 corresponding to its source file. Otherwise, return 739 ourself. 740 """ 741 srcdir_list = self.dir.srcdir_list() 742 if srcdir_list: 743 srcnode = srcdir_list[0].Entry(self.name) 744 srcnode.must_be_same(self.__class__) 745 return srcnode 746 return self
747
748 - def get_path(self, dir=None):
749 """Return path relative to the current working directory of the 750 Node.FS.Base object that owns us.""" 751 if not dir: 752 dir = self.fs.getcwd() 753 if self == dir: 754 return '.' 755 path_elems = self.path_elements 756 pathname = '' 757 try: i = path_elems.index(dir) 758 except ValueError: 759 for p in path_elems[:-1]: 760 pathname += p.dirname 761 else: 762 for p in path_elems[i+1:-1]: 763 pathname += p.dirname 764 return pathname + path_elems[-1].name
765
766 - def set_src_builder(self, builder):
767 """Set the source code builder for this node.""" 768 self.sbuilder = builder 769 if not self.has_builder(): 770 self.builder_set(builder)
771
772 - def src_builder(self):
773 """Fetch the source code builder for this node. 774 775 If there isn't one, we cache the source code builder specified 776 for the directory (which in turn will cache the value from its 777 parent directory, and so on up to the file system root). 778 """ 779 try: 780 scb = self.sbuilder 781 except AttributeError: 782 scb = self.dir.src_builder() 783 self.sbuilder = scb 784 return scb
785
786 - def get_abspath(self):
787 """Get the absolute path of the file.""" 788 return self.abspath
789
790 - def for_signature(self):
791 # Return just our name. Even an absolute path would not work, 792 # because that can change thanks to symlinks or remapped network 793 # paths. 794 return self.name
795
796 - def get_subst_proxy(self):
797 try: 798 return self._proxy 799 except AttributeError: 800 ret = EntryProxy(self) 801 self._proxy = ret 802 return ret
803
804 - def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
805 """ 806 807 Generates a target entry that corresponds to this entry (usually 808 a source file) with the specified prefix and suffix. 809 810 Note that this method can be overridden dynamically for generated 811 files that need different behavior. See Tool/swig.py for 812 an example. 813 """ 814 return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
815
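For illustration (not part of the module), target_from_source() just strips the suffix, re-wraps the name, and looks the result up in the same directory; assuming 'node' is the File node for 'src/foo.c':

    # Illustrative only.
    obj = node.target_from_source(prefix='lib', suffix='.obj')
    # obj is the Entry node for 'src/libfoo.obj', alongside the source file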
816 - def _Rfindalldirs_key(self, pathlist):
817 return pathlist
818 819 memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key)) 820
821 - def Rfindalldirs(self, pathlist):
822 """ 823 Return all of the directories for a given path list, including 824 corresponding "backing" directories in any repositories. 825 826 The Node lookups are relative to this Node (typically a 827 directory), so memoizing result saves cycles from looking 828 up the same path for each target in a given directory. 829 """ 830 try: 831 memo_dict = self._memo['Rfindalldirs'] 832 except KeyError: 833 memo_dict = {} 834 self._memo['Rfindalldirs'] = memo_dict 835 else: 836 try: 837 return memo_dict[pathlist] 838 except KeyError: 839 pass 840 841 create_dir_relative_to_self = self.Dir 842 result = [] 843 for path in pathlist: 844 if isinstance(path, SCons.Node.Node): 845 result.append(path) 846 else: 847 dir = create_dir_relative_to_self(path) 848 result.extend(dir.get_all_rdirs()) 849 850 memo_dict[pathlist] = result 851 852 return result
853
854 - def RDirs(self, pathlist):
855 """Search for a list of directories in the Repository list.""" 856 cwd = self.cwd or self.fs._cwd 857 return cwd.Rfindalldirs(pathlist)
858 859 memoizer_counters.append(SCons.Memoize.CountValue('rentry')) 860
861 - def rentry(self):
862 try: 863 return self._memo['rentry'] 864 except KeyError: 865 pass 866 result = self 867 if not self.exists(): 868 norm_name = _my_normcase(self.name) 869 for dir in self.dir.get_all_rdirs(): 870 try: 871 node = dir.entries[norm_name] 872 except KeyError: 873 if dir.entry_exists_on_disk(self.name): 874 result = dir.Entry(self.name) 875 break 876 self._memo['rentry'] = result 877 return result
878
879 - def _glob1(self, pattern, ondisk=True, source=False, strings=False):
880 return []
881
882 -class Entry(Base):
883 """This is the class for generic Node.FS entries--that is, things 884 that could be a File or a Dir, but we're just not sure yet. 885 Consequently, the methods in this class really exist just to 886 transform their associated object into the right class when the 887 time comes, and then call the same-named method in the transformed 888 class.""" 889
890 - def diskcheck_match(self):
891 pass
892
893 - def disambiguate(self, must_exist=None):
894 """ 895 """ 896 if self.isdir(): 897 self.__class__ = Dir 898 self._morph() 899 elif self.isfile(): 900 self.__class__ = File 901 self._morph() 902 self.clear() 903 else: 904 # There was nothing on-disk at this location, so look in 905 # the src directory. 906 # 907 # We can't just use self.srcnode() straight away because 908 # that would create an actual Node for this file in the src 909 # directory, and there might not be one. Instead, use the 910 # dir_on_disk() method to see if there's something on-disk 911 # with that name, in which case we can go ahead and call 912 # self.srcnode() to create the right type of entry. 913 srcdir = self.dir.srcnode() 914 if srcdir != self.dir and \ 915 srcdir.entry_exists_on_disk(self.name) and \ 916 self.srcnode().isdir(): 917 self.__class__ = Dir 918 self._morph() 919 elif must_exist: 920 msg = "No such file or directory: '%s'" % self.abspath 921 raise SCons.Errors.UserError(msg) 922 else: 923 self.__class__ = File 924 self._morph() 925 self.clear() 926 return self
927
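As an aside, the morphing behaviour can be observed directly (illustrative only; 'build/generated' is a made-up path):

    # Illustrative only.
    fs = FS()
    node = fs.Entry('build/generated')   # still the generic Entry class
    node.disambiguate()                  # consults the disk (and the source dir);
                                         # node.__class__ is now Dir or File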
928 - def rfile(self):
929 """We're a generic Entry, but the caller is actually looking for 930 a File at this point, so morph into one.""" 931 self.__class__ = File 932 self._morph() 933 self.clear() 934 return File.rfile(self)
935
936 - def scanner_key(self):
937 return self.get_suffix()
938
939 - def get_contents(self):
940 """Fetch the contents of the entry. Returns the exact binary 941 contents of the file.""" 942 try: 943 self = self.disambiguate(must_exist=1) 944 except SCons.Errors.UserError: 945 # There was nothing on disk with which to disambiguate 946 # this entry. Leave it as an Entry, but return a null 947 # string so calls to get_contents() in emitters and the 948 # like (e.g. in qt.py) don't have to disambiguate by hand 949 # or catch the exception. 950 return '' 951 else: 952 return self.get_contents()
953
954 - def get_text_contents(self):
955 """Fetch the decoded text contents of a Unicode encoded Entry. 956 957 Since this should return the text contents from the file 958 system, we check to see into what sort of subclass we should 959 morph this Entry.""" 960 try: 961 self = self.disambiguate(must_exist=1) 962 except SCons.Errors.UserError: 963 # There was nothing on disk with which to disambiguate 964 # this entry. Leave it as an Entry, but return a null 965 # string so calls to get_text_contents() in emitters and 966 # the like (e.g. in qt.py) don't have to disambiguate by 967 # hand or catch the exception. 968 return '' 969 else: 970 return self.get_text_contents()
971
972 - def must_be_same(self, klass):
973 """Called to make sure a Node is a Dir. Since we're an 974 Entry, we can morph into one.""" 975 if self.__class__ is not klass: 976 self.__class__ = klass 977 self._morph() 978 self.clear()
979 980 # The following methods can get called before the Taskmaster has 981 # had a chance to call disambiguate() directly to see if this Entry 982 # should really be a Dir or a File. We therefore use these to call 983 # disambiguate() transparently (from our caller's point of view). 984 # 985 # Right now, this minimal set of methods has been derived by just 986 # looking at some of the methods that will obviously be called early 987 # in any of the various Taskmasters' calling sequences, and then 988 # empirically figuring out which additional methods are necessary 989 # to make various tests pass. 990
991 - def exists(self):
992 """Return if the Entry exists. Check the file system to see 993 what we should turn into first. Assume a file if there's no 994 directory.""" 995 return self.disambiguate().exists()
996
997 - def rel_path(self, other):
998 d = self.disambiguate() 999 if d.__class__ is Entry: 1000 raise Exception("rel_path() could not disambiguate File/Dir") 1001 return d.rel_path(other)
1002
1003 - def new_ninfo(self):
1004 return self.disambiguate().new_ninfo()
1005
1006 - def changed_since_last_build(self, target, prev_ni):
1007 return self.disambiguate().changed_since_last_build(target, prev_ni)
1008
1009 - def _glob1(self, pattern, ondisk=True, source=False, strings=False):
1010 return self.disambiguate()._glob1(pattern, ondisk, source, strings)
1011
1012 - def get_subst_proxy(self):
1013 return self.disambiguate().get_subst_proxy()
1014 1015 # This is for later so we can differentiate between Entry the class and Entry 1016 # the method of the FS class. 1017 _classEntry = Entry 1018 1019
1020 -class LocalFS(object):
1021 1022 if SCons.Memoize.use_memoizer: 1023 __metaclass__ = SCons.Memoize.Memoized_Metaclass 1024 1025 # This class implements an abstraction layer for operations involving 1026 # a local file system. Essentially, this wraps any function in 1027 # the os, os.path or shutil modules that we use to actually go do 1028 # anything with or to the local file system. 1029 # 1030 # Note that there's a very good chance we'll refactor this part of 1031 # the architecture in some way as we really implement the interface(s) 1032 # for remote file system Nodes. For example, the right architecture 1033 # might be to have this be a subclass instead of a base class. 1034 # Nevertheless, we're using this as a first step in that direction. 1035 # 1036 # We're not using chdir() yet because the calling subclass method 1037 # needs to use os.chdir() directly to avoid recursion. Will we 1038 # really need this one? 1039 #def chdir(self, path): 1040 # return os.chdir(path)
1041 - def chmod(self, path, mode):
1042 return os.chmod(path, mode)
1043 - def copy(self, src, dst):
1044 return shutil.copy(src, dst)
1045 - def copy2(self, src, dst):
1046 return shutil.copy2(src, dst)
1047 - def exists(self, path):
1048 return os.path.exists(path)
1049 - def getmtime(self, path):
1050 return os.path.getmtime(path)
1051 - def getsize(self, path):
1052 return os.path.getsize(path)
1053 - def isdir(self, path):
1054 return os.path.isdir(path)
1055 - def isfile(self, path):
1056 return os.path.isfile(path)
1059 - def lstat(self, path):
1060 return os.lstat(path)
1061 - def listdir(self, path):
1062 return os.listdir(path)
1063 - def makedirs(self, path):
1064 return os.makedirs(path)
1065 - def mkdir(self, path):
1066 return os.mkdir(path)
1067 - def rename(self, old, new):
1068 return os.rename(old, new)
1069 - def stat(self, path):
1070 return os.stat(path)
1073 - def open(self, path):
1074 return open(path)
1077 1078 if hasattr(os, 'symlink'): 1081 else: 1084 1085 if hasattr(os, 'readlink'): 1088 else:
1091 1092 1093 #class RemoteFS: 1094 # # Skeleton for the obvious methods we might need from the 1095 # # abstraction layer for a remote filesystem. 1096 # def upload(self, local_src, remote_dst): 1097 # pass 1098 # def download(self, remote_src, local_dst): 1099 # pass 1100 1101
1102 -class FS(LocalFS):
1103 1104 memoizer_counters = [] 1105
1106 - def __init__(self, path = None):
1107 """Initialize the Node.FS subsystem. 1108 1109 The supplied path is the top of the source tree, where we 1110 expect to find the top-level build file. If no path is 1111 supplied, the current directory is the default. 1112 1113 The path argument must be a valid absolute path. 1114 """ 1115 if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS') 1116 1117 self._memo = {} 1118 1119 self.Root = {} 1120 self.SConstruct_dir = None 1121 self.max_drift = default_max_drift 1122 1123 self.Top = None 1124 if path is None: 1125 self.pathTop = os.getcwd() 1126 else: 1127 self.pathTop = path 1128 self.defaultDrive = _my_normcase(_my_splitdrive(self.pathTop)[0]) 1129 1130 self.Top = self.Dir(self.pathTop) 1131 self.Top.path = '.' 1132 self.Top.tpath = '.' 1133 self._cwd = self.Top 1134 1135 DirNodeInfo.fs = self 1136 FileNodeInfo.fs = self
1137
1138 - def set_SConstruct_dir(self, dir):
1139 self.SConstruct_dir = dir
1140
1141 - def get_max_drift(self):
1142 return self.max_drift
1143
1144 - def set_max_drift(self, max_drift):
1145 self.max_drift = max_drift
1146
1147 - def getcwd(self):
1148 if hasattr(self, "_cwd"): 1149 return self._cwd 1150 else: 1151 return "<no cwd>"
1152
1153 - def chdir(self, dir, change_os_dir=0):
1154 """Change the current working directory for lookups. 1155 If change_os_dir is true, we will also change the "real" cwd 1156 to match. 1157 """ 1158 curr=self._cwd 1159 try: 1160 if dir is not None: 1161 self._cwd = dir 1162 if change_os_dir: 1163 os.chdir(dir.abspath) 1164 except OSError: 1165 self._cwd = curr 1166 raise
1167
1168 - def get_root(self, drive):
1169 """ 1170 Returns the root directory for the specified drive, creating 1171 it if necessary. 1172 """ 1173 drive = _my_normcase(drive) 1174 try: 1175 return self.Root[drive] 1176 except KeyError: 1177 root = RootDir(drive, self) 1178 self.Root[drive] = root 1179 if not drive: 1180 self.Root[self.defaultDrive] = root 1181 elif drive == self.defaultDrive: 1182 self.Root[''] = root 1183 return root
1184
1185 - def _lookup(self, p, directory, fsclass, create=1):
1186 """ 1187 The generic entry point for Node lookup with user-supplied data. 1188 1189 This translates arbitrary input into a canonical Node.FS object 1190 of the specified fsclass. The general approach for strings is 1191 to turn it into a fully normalized absolute path and then call 1192 the root directory's lookup_abs() method for the heavy lifting. 1193 1194 If the path name begins with '#', it is unconditionally 1195 interpreted relative to the top-level directory of this FS. '#' 1196 is treated as a synonym for the top-level SConstruct directory, 1197 much like '~' is treated as a synonym for the user's home 1198 directory in a UNIX shell. So both '#foo' and '#/foo' refer 1199 to the 'foo' subdirectory underneath the top-level SConstruct 1200 directory. 1201 1202 If the path name is relative, then the path is looked up relative 1203 to the specified directory, or the current directory (self._cwd, 1204 typically the SConscript directory) if the specified directory 1205 is None. 1206 """ 1207 if isinstance(p, Base): 1208 # It's already a Node.FS object. Make sure it's the right 1209 # class and return. 1210 p.must_be_same(fsclass) 1211 return p 1212 # str(p) in case it's something like a proxy object 1213 p = str(p) 1214 1215 if not os_sep_is_slash: 1216 p = p.replace(OS_SEP, '/') 1217 1218 if p[0:1] == '#': 1219 # There was an initial '#', so we strip it and override 1220 # whatever directory they may have specified with the 1221 # top-level SConstruct directory. 1222 p = p[1:] 1223 directory = self.Top 1224 1225 # There might be a drive letter following the 1226 # '#'. Although it is not described in the SCons man page, 1227 # the regression test suite explicitly tests for that 1228 # syntax. It seems to mean the following thing: 1229 # 1230 # Assuming the the SCons top dir is in C:/xxx/yyy, 1231 # '#X:/toto' means X:/xxx/yyy/toto. 1232 # 1233 # i.e. it assumes that the X: drive has a directory 1234 # structure similar to the one found on drive C:. 1235 if do_splitdrive: 1236 drive, p = _my_splitdrive(p) 1237 if drive: 1238 root = self.get_root(drive) 1239 else: 1240 root = directory.root 1241 else: 1242 root = directory.root 1243 1244 # We can only strip trailing after splitting the drive 1245 # since the drive might the UNC '//' prefix. 1246 p = p.strip('/') 1247 1248 needs_normpath = needs_normpath_match(p) 1249 1250 # The path is relative to the top-level SCons directory. 1251 if p in ('', '.'): 1252 p = directory.labspath 1253 else: 1254 p = directory.labspath + '/' + p 1255 else: 1256 if do_splitdrive: 1257 drive, p = _my_splitdrive(p) 1258 if drive and not p: 1259 # This causes a naked drive letter to be treated 1260 # as a synonym for the root directory on that 1261 # drive. 1262 p = '/' 1263 else: 1264 drive = '' 1265 1266 # We can only strip trailing '/' since the drive might the 1267 # UNC '//' prefix. 1268 if p != '/': 1269 p = p.rstrip('/') 1270 1271 needs_normpath = needs_normpath_match(p) 1272 1273 if p[0:1] == '/': 1274 # Absolute path 1275 root = self.get_root(drive) 1276 else: 1277 # This is a relative lookup or to the current directory 1278 # (the path name is not absolute). Add the string to the 1279 # appropriate directory lookup path, after which the whole 1280 # thing gets normalized. 
1281 if directory: 1282 if not isinstance(directory, Dir): 1283 directory = self.Dir(directory) 1284 else: 1285 directory = self._cwd 1286 1287 if p in ('', '.'): 1288 p = directory.labspath 1289 else: 1290 p = directory.labspath + '/' + p 1291 1292 if drive: 1293 root = self.get_root(drive) 1294 else: 1295 root = directory.root 1296 1297 if needs_normpath is not None: 1298 # Normalize a pathname. Will return the same result for 1299 # equivalent paths. 1300 # 1301 # We take advantage of the fact that we have an absolute 1302 # path here for sure. In addition, we know that the 1303 # components of lookup path are separated by slashes at 1304 # this point. Because of this, this code is about 2X 1305 # faster than calling os.path.normpath() followed by 1306 # replacing os.sep with '/' again. 1307 ins = p.split('/')[1:] 1308 outs = [] 1309 for d in ins: 1310 if d == '..': 1311 try: 1312 outs.pop() 1313 except IndexError: 1314 pass 1315 elif d not in ('', '.'): 1316 outs.append(d) 1317 p = '/' + '/'.join(outs) 1318 1319 return root._lookup_abs(p, fsclass, create)
1320
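To illustrate the '#' handling described in the docstring above (an editorial aside; the paths are made up):

    # Illustrative only.
    fs = FS('/home/me/proj')                    # top-level SConstruct directory
    n1 = fs.File('#src/main.c')                 # always /home/me/proj/src/main.c,
                                                # no matter what directory is current
    n2 = fs.File('src/main.c', fs.Dir('sub'))   # relative lookup: /home/me/proj/sub/src/main.c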
1321 - def Entry(self, name, directory = None, create = 1):
1322 """Look up or create a generic Entry node with the specified name. 1323 If the name is a relative path (begins with ./, ../, or a file 1324 name), then it is looked up relative to the supplied directory 1325 node, or to the top level directory of the FS (supplied at 1326 construction time) if no directory is supplied. 1327 """ 1328 return self._lookup(name, directory, Entry, create)
1329
1330 - def File(self, name, directory = None, create = 1):
1331 """Look up or create a File node with the specified name. If 1332 the name is a relative path (begins with ./, ../, or a file name), 1333 then it is looked up relative to the supplied directory node, 1334 or to the top level directory of the FS (supplied at construction 1335 time) if no directory is supplied. 1336 1337 This method will raise TypeError if a directory is found at the 1338 specified path. 1339 """ 1340 return self._lookup(name, directory, File, create)
1341
1342 - def Dir(self, name, directory = None, create = True):
1343 """Look up or create a Dir node with the specified name. If 1344 the name is a relative path (begins with ./, ../, or a file name), 1345 then it is looked up relative to the supplied directory node, 1346 or to the top level directory of the FS (supplied at construction 1347 time) if no directory is supplied. 1348 1349 This method will raise TypeError if a normal file is found at the 1350 specified path. 1351 """ 1352 return self._lookup(name, directory, Dir, create)
1353
1354 - def VariantDir(self, variant_dir, src_dir, duplicate=1):
1355 """Link the supplied variant directory to the source directory 1356 for purposes of building files.""" 1357 1358 if not isinstance(src_dir, SCons.Node.Node): 1359 src_dir = self.Dir(src_dir) 1360 if not isinstance(variant_dir, SCons.Node.Node): 1361 variant_dir = self.Dir(variant_dir) 1362 if src_dir.is_under(variant_dir): 1363 raise SCons.Errors.UserError("Source directory cannot be under variant directory.") 1364 if variant_dir.srcdir: 1365 if variant_dir.srcdir == src_dir: 1366 return # We already did this. 1367 raise SCons.Errors.UserError("'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)) 1368 variant_dir.link(src_dir, duplicate)
1369
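A small usage sketch of the linkage VariantDir() establishes (illustrative only; 'fs' is an FS instance and the directory names are made up):

    # Illustrative only.
    fs.VariantDir('build', 'src', duplicate=0)
    f = fs.File('build/foo.c')
    print(f.srcnode())    # -> src/foo.c: the source node the variant file is linked to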
1370 - def Repository(self, *dirs):
1371 """Specify Repository directories to search.""" 1372 for d in dirs: 1373 if not isinstance(d, SCons.Node.Node): 1374 d = self.Dir(d) 1375 self.Top.addRepository(d)
1376
1377 - def variant_dir_target_climb(self, orig, dir, tail):
1378 """Create targets in corresponding variant directories 1379 1380 Climb the directory tree, and look up path names 1381 relative to any linked variant directories we find. 1382 1383 Even though this loops and walks up the tree, we don't memoize 1384 the return value because this is really only used to process 1385 the command-line targets. 1386 """ 1387 targets = [] 1388 message = None 1389 fmt = "building associated VariantDir targets: %s" 1390 start_dir = dir 1391 while dir: 1392 for bd in dir.variant_dirs: 1393 if start_dir.is_under(bd): 1394 # If already in the build-dir location, don't reflect 1395 return [orig], fmt % str(orig) 1396 p = os.path.join(bd.path, *tail) 1397 targets.append(self.Entry(p)) 1398 tail = [dir.name] + tail 1399 dir = dir.up() 1400 if targets: 1401 message = fmt % ' '.join(map(str, targets)) 1402 return targets, message
1403
1404 - def Glob(self, pathname, ondisk=True, source=True, strings=False, exclude=None, cwd=None):
1405 """ 1406 Globs 1407 1408 This is mainly a shim layer 1409 """ 1410 if cwd is None: 1411 cwd = self.getcwd() 1412 return cwd.glob(pathname, ondisk, source, strings, exclude)
1413
1414 -class DirNodeInfo(SCons.Node.NodeInfoBase):
1415 # This should get reset by the FS initialization. 1416 current_version_id = 1 1417 1418 fs = None 1419
1420 - def str_to_node(self, s):
1421 top = self.fs.Top 1422 root = top.root 1423 if do_splitdrive: 1424 drive, s = _my_splitdrive(s) 1425 if drive: 1426 root = self.fs.get_root(drive) 1427 if not os.path.isabs(s): 1428 s = top.labspath + '/' + s 1429 return root._lookup_abs(s, Entry)
1430
1431 -class DirBuildInfo(SCons.Node.BuildInfoBase):
1432 current_version_id = 1
1433 1434 glob_magic_check = re.compile('[*?[]') 1435
1436 -def has_glob_magic(s):
1437 return glob_magic_check.search(s) is not None
1438
1439 -class Dir(Base):
1440 """A class for directories in a file system. 1441 """ 1442 1443 memoizer_counters = [] 1444 1445 NodeInfo = DirNodeInfo 1446 BuildInfo = DirBuildInfo 1447
1448 - def __init__(self, name, directory, fs):
1449 if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.Dir') 1450 Base.__init__(self, name, directory, fs) 1451 self._morph()
1452
1453 - def _morph(self):
1454 """Turn a file system Node (either a freshly initialized directory 1455 object or a separate Entry object) into a proper directory object. 1456 1457 Set up this directory's entries and hook it into the file 1458 system tree. Specify that directories (this Node) don't use 1459 signatures for calculating whether they're current. 1460 """ 1461 1462 self.repositories = [] 1463 self.srcdir = None 1464 1465 self.entries = {} 1466 self.entries['.'] = self 1467 self.entries['..'] = self.dir 1468 self.cwd = self 1469 self.searched = 0 1470 self._sconsign = None 1471 self.variant_dirs = [] 1472 self.root = self.dir.root 1473 1474 # For directories, we make a difference between the directory 1475 # 'name' and the directory 'dirname'. The 'name' attribute is 1476 # used when we need to print the 'name' of the directory or 1477 # when we it is used as the last part of a path. The 'dirname' 1478 # is used when the directory is not the last element of the 1479 # path. The main reason for making that distinction is that 1480 # for RoorDir's the dirname can not be easily inferred from 1481 # the name. For example, we have to add a '/' after a drive 1482 # letter but not after a UNC path prefix ('//'). 1483 self.dirname = self.name + OS_SEP 1484 1485 # Don't just reset the executor, replace its action list, 1486 # because it might have some pre-or post-actions that need to 1487 # be preserved. 1488 # 1489 # But don't reset the executor if there is a non-null executor 1490 # attached already. The existing executor might have other 1491 # targets, in which case replacing the action list with a 1492 # Mkdir action is a big mistake. 1493 if not hasattr(self, 'executor'): 1494 self.builder = get_MkdirBuilder() 1495 self.get_executor().set_action_list(self.builder.action) 1496 else: 1497 # Prepend MkdirBuilder action to existing action list 1498 l = self.get_executor().action_list 1499 a = get_MkdirBuilder().action 1500 l.insert(0, a) 1501 self.get_executor().set_action_list(l)
1502
1503 - def diskcheck_match(self):
1504 diskcheck_match(self, self.isfile, 1505 "File %s found where directory expected.")
1506
1507 - def __clearRepositoryCache(self, duplicate=None):
1508 """Called when we change the repository(ies) for a directory. 1509 This clears any cached information that is invalidated by changing 1510 the repository.""" 1511 1512 for node in self.entries.values(): 1513 if node != self.dir: 1514 if node != self and isinstance(node, Dir): 1515 node.__clearRepositoryCache(duplicate) 1516 else: 1517 node.clear() 1518 try: 1519 del node._srcreps 1520 except AttributeError: 1521 pass 1522 if duplicate is not None: 1523 node.duplicate=duplicate
1524
1525 - def __resetDuplicate(self, node):
1526 if node != self: 1527 node.duplicate = node.get_dir().duplicate
1528
1529 - def Entry(self, name):
1530 """ 1531 Looks up or creates an entry node named 'name' relative to 1532 this directory. 1533 """ 1534 return self.fs.Entry(name, self)
1535
1536 - def Dir(self, name, create=True):
1537 """ 1538 Looks up or creates a directory node named 'name' relative to 1539 this directory. 1540 """ 1541 return self.fs.Dir(name, self, create)
1542
1543 - def File(self, name):
1544 """ 1545 Looks up or creates a file node named 'name' relative to 1546 this directory. 1547 """ 1548 return self.fs.File(name, self)
1549 1557
1558 - def getRepositories(self):
1559 """Returns a list of repositories for this directory. 1560 """ 1561 if self.srcdir and not self.duplicate: 1562 return self.srcdir.get_all_rdirs() + self.repositories 1563 return self.repositories
1564 1565 memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs')) 1566
1567 - def get_all_rdirs(self):
1568 try: 1569 return list(self._memo['get_all_rdirs']) 1570 except KeyError: 1571 pass 1572 1573 result = [self] 1574 fname = '.' 1575 dir = self 1576 while dir: 1577 for rep in dir.getRepositories(): 1578 result.append(rep.Dir(fname)) 1579 if fname == '.': 1580 fname = dir.name 1581 else: 1582 fname = dir.name + OS_SEP + fname 1583 dir = dir.up() 1584 1585 self._memo['get_all_rdirs'] = list(result) 1586 1587 return result
1588
1589 - def addRepository(self, dir):
1590 if dir != self and not dir in self.repositories: 1591 self.repositories.append(dir) 1592 dir.tpath = '.' 1593 self.__clearRepositoryCache()
1594
1595 - def up(self):
1596 return self.dir
1597
1598 - def _rel_path_key(self, other):
1599 return str(other)
1600 1601 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key)) 1602
1603 - def rel_path(self, other):
1604 """Return a path to "other" relative to this directory. 1605 """ 1606 1607 # This complicated and expensive method, which constructs relative 1608 # paths between arbitrary Node.FS objects, is no longer used 1609 # by SCons itself. It was introduced to store dependency paths 1610 # in .sconsign files relative to the target, but that ended up 1611 # being significantly inefficient. 1612 # 1613 # We're continuing to support the method because some SConstruct 1614 # files out there started using it when it was available, and 1615 # we're all about backwards compatibility.. 1616 1617 try: 1618 memo_dict = self._memo['rel_path'] 1619 except KeyError: 1620 memo_dict = {} 1621 self._memo['rel_path'] = memo_dict 1622 else: 1623 try: 1624 return memo_dict[other] 1625 except KeyError: 1626 pass 1627 1628 if self is other: 1629 result = '.' 1630 1631 elif not other in self.path_elements: 1632 try: 1633 other_dir = other.get_dir() 1634 except AttributeError: 1635 result = str(other) 1636 else: 1637 if other_dir is None: 1638 result = other.name 1639 else: 1640 dir_rel_path = self.rel_path(other_dir) 1641 if dir_rel_path == '.': 1642 result = other.name 1643 else: 1644 result = dir_rel_path + OS_SEP + other.name 1645 else: 1646 i = self.path_elements.index(other) + 1 1647 1648 path_elems = ['..'] * (len(self.path_elements) - i) \ 1649 + [n.name for n in other.path_elements[i:]] 1650 1651 result = OS_SEP.join(path_elems) 1652 1653 memo_dict[other] = result 1654 1655 return result
1656
1657 - def get_env_scanner(self, env, kw={}):
1658 import SCons.Defaults 1659 return SCons.Defaults.DirEntryScanner
1660
1661 - def get_target_scanner(self):
1662 import SCons.Defaults 1663 return SCons.Defaults.DirEntryScanner
1664
1665 - def get_found_includes(self, env, scanner, path):
1666 """Return this directory's implicit dependencies. 1667 1668 We don't bother caching the results because the scan typically 1669 shouldn't be requested more than once (as opposed to scanning 1670 .h file contents, which can be requested as many times as the 1671 files is #included by other files). 1672 """ 1673 if not scanner: 1674 return [] 1675 # Clear cached info for this Dir. If we already visited this 1676 # directory on our walk down the tree (because we didn't know at 1677 # that point it was being used as the source for another Node) 1678 # then we may have calculated build signature before realizing 1679 # we had to scan the disk. Now that we have to, though, we need 1680 # to invalidate the old calculated signature so that any node 1681 # dependent on our directory structure gets one that includes 1682 # info about everything on disk. 1683 self.clear() 1684 return scanner(self, env, path)
1685 1686 # 1687 # Taskmaster interface subsystem 1688 # 1689
1690 - def prepare(self):
1691 pass
1692
1693 - def build(self, **kw):
1694 """A null "builder" for directories.""" 1695 global MkdirBuilder 1696 if self.builder is not MkdirBuilder: 1697 SCons.Node.Node.build(self, **kw)
1698 1699 # 1700 # 1701 # 1702
1703 - def _create(self):
1704 """Create this directory, silently and without worrying about 1705 whether the builder is the default or not.""" 1706 listDirs = [] 1707 parent = self 1708 while parent: 1709 if parent.exists(): 1710 break 1711 listDirs.append(parent) 1712 p = parent.up() 1713 if p is None: 1714 # Don't use while: - else: for this condition because 1715 # if so, then parent is None and has no .path attribute. 1716 raise SCons.Errors.StopError(parent.path) 1717 parent = p 1718 listDirs.reverse() 1719 for dirnode in listDirs: 1720 try: 1721 # Don't call dirnode.build(), call the base Node method 1722 # directly because we definitely *must* create this 1723 # directory. The dirnode.build() method will suppress 1724 # the build if it's the default builder. 1725 SCons.Node.Node.build(dirnode) 1726 dirnode.get_executor().nullify() 1727 # The build() action may or may not have actually 1728 # created the directory, depending on whether the -n 1729 # option was used or not. Delete the _exists and 1730 # _rexists attributes so they can be reevaluated. 1731 dirnode.clear() 1732 except OSError: 1733 pass
1734
1736 global MkdirBuilder 1737 return self.builder is not MkdirBuilder and self.has_builder()
1738
1739 - def alter_targets(self):
1740 """Return any corresponding targets in a variant directory. 1741 """ 1742 return self.fs.variant_dir_target_climb(self, self, [])
1743
1744 - def scanner_key(self):
1745 """A directory does not get scanned.""" 1746 return None
1747
1748 - def get_text_contents(self):
1749 """We already emit things in text, so just return the binary 1750 version.""" 1751 return self.get_contents()
1752
1753 - def get_contents(self):
1754 """Return content signatures and names of all our children 1755 separated by new-lines. Ensure that the nodes are sorted.""" 1756 contents = [] 1757 for node in sorted(self.children(), key=lambda t: t.name): 1758 contents.append('%s %s\n' % (node.get_csig(), node.name)) 1759 return ''.join(contents)
1760
1761 - def get_csig(self):
1762 """Compute the content signature for Directory nodes. In 1763 general, this is not needed and the content signature is not 1764 stored in the DirNodeInfo. However, if get_contents on a Dir 1765 node is called which has a child directory, the child 1766 directory should return the hash of its contents.""" 1767 contents = self.get_contents() 1768 return SCons.Util.MD5signature(contents)
1769
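As an aside, the directory "contents" described above is just a sorted, newline-separated list of child signature/name pairs; a hand-rolled equivalent of the signature computation (illustrative only, assuming Python 2 strings) would be:

    # Illustrative only: roughly how a Dir's content signature is derived.
    import hashlib

    def dir_csig(children):
        # 'children' is an iterable of nodes with .get_csig() and .name
        lines = ['%s %s\n' % (kid.get_csig(), kid.name)
                 for kid in sorted(children, key=lambda t: t.name)]
        return hashlib.md5(''.join(lines)).hexdigest()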
1770 - def do_duplicate(self, src):
1771 pass
1772 1773 changed_since_last_build = SCons.Node.Node.state_has_changed 1774
1775 - def is_up_to_date(self):
1776 """If any child is not up-to-date, then this directory isn't, 1777 either.""" 1778 if self.builder is not MkdirBuilder and not self.exists(): 1779 return 0 1780 up_to_date = SCons.Node.up_to_date 1781 for kid in self.children(): 1782 if kid.get_state() > up_to_date: 1783 return 0 1784 return 1
1785
1786 - def rdir(self):
1787 if not self.exists(): 1788 norm_name = _my_normcase(self.name) 1789 for dir in self.dir.get_all_rdirs(): 1790 try: node = dir.entries[norm_name] 1791 except KeyError: node = dir.dir_on_disk(self.name) 1792 if node and node.exists() and \ 1793 (isinstance(dir, Dir) or isinstance(dir, Entry)): 1794 return node 1795 return self
1796
1797 - def sconsign(self):
1798 """Return the .sconsign file info for this directory, 1799 creating it first if necessary.""" 1800 if not self._sconsign: 1801 import SCons.SConsign 1802 self._sconsign = SCons.SConsign.ForDirectory(self) 1803 return self._sconsign
1804
1805 - def srcnode(self):
1806 """Dir has a special need for srcnode()...if we 1807 have a srcdir attribute set, then that *is* our srcnode.""" 1808 if self.srcdir: 1809 return self.srcdir 1810 return Base.srcnode(self)
1811
1812 - def get_timestamp(self):
1813 """Return the latest timestamp from among our children""" 1814 stamp = 0 1815 for kid in self.children(): 1816 if kid.get_timestamp() > stamp: 1817 stamp = kid.get_timestamp() 1818 return stamp
1819
1820 - def entry_abspath(self, name):
1821 return self.abspath + OS_SEP + name
1822
1823 - def entry_labspath(self, name):
1824 return self.labspath + '/' + name
1825
1826 - def entry_path(self, name):
1827 return self.path + OS_SEP + name
1828
1829 - def entry_tpath(self, name):
1830 return self.tpath + OS_SEP + name
1831
1832 - def entry_exists_on_disk(self, name):
1833 """ Searches through the file/dir entries of the current 1834 directory, and returns True if a physical entry with the given 1835 name could be found. 1836 1837 @see rentry_exists_on_disk 1838 """ 1839 try: 1840 d = self.on_disk_entries 1841 except AttributeError: 1842 d = {} 1843 try: 1844 entries = os.listdir(self.abspath) 1845 except OSError: 1846 pass 1847 else: 1848 for entry in map(_my_normcase, entries): 1849 d[entry] = True 1850 self.on_disk_entries = d 1851 if sys.platform == 'win32' or sys.platform == 'cygwin': 1852 name = _my_normcase(name) 1853 result = d.get(name) 1854 if result is None: 1855 # Belt-and-suspenders for Windows: check directly for 1856 # 8.3 file names that don't show up in os.listdir(). 1857 result = os.path.exists(self.abspath + OS_SEP + name) 1858 d[name] = result 1859 return result 1860 else: 1861 return name in d
1862
1863 - def rentry_exists_on_disk(self, name):
1864 """ Searches through the file/dir entries of the current 1865 *and* all its remote directories (repos), and returns 1866 True if a physical entry with the given name could be found. 1867 The local directory (self) gets searched first, so 1868 repositories take a lower precedence regarding the 1869 searching order. 1870 1871 @see entry_exists_on_disk 1872 """ 1873 1874 rentry_exists = self.entry_exists_on_disk(name) 1875 if not rentry_exists: 1876 # Search through the repository folders 1877 norm_name = _my_normcase(name) 1878 for rdir in self.get_all_rdirs(): 1879 try: 1880 node = rdir.entries[norm_name] 1881 if node: 1882 rentry_exists = True 1883 break 1884 except KeyError: 1885 if rdir.entry_exists_on_disk(name): 1886 rentry_exists = True 1887 break 1888 return rentry_exists
1889 1890 memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list')) 1891
1892 - def srcdir_list(self):
1893 try: 1894 return self._memo['srcdir_list'] 1895 except KeyError: 1896 pass 1897 1898 result = [] 1899 1900 dirname = '.' 1901 dir = self 1902 while dir: 1903 if dir.srcdir: 1904 result.append(dir.srcdir.Dir(dirname)) 1905 dirname = dir.name + OS_SEP + dirname 1906 dir = dir.up() 1907 1908 self._memo['srcdir_list'] = result 1909 1910 return result
1911
1912 - def srcdir_duplicate(self, name):
1913 for dir in self.srcdir_list(): 1914 if self.is_under(dir): 1915 # We shouldn't source from something in the build path; 1916 # variant_dir is probably under src_dir, in which case 1917 # we are reflecting. 1918 break 1919 if dir.entry_exists_on_disk(name): 1920 srcnode = dir.Entry(name).disambiguate() 1921 if self.duplicate: 1922 node = self.Entry(name).disambiguate() 1923 node.do_duplicate(srcnode) 1924 return node 1925 else: 1926 return srcnode 1927 return None
1928
1929 - def _srcdir_find_file_key(self, filename):
1930 return filename
1931 1932 memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key)) 1933
1934 - def srcdir_find_file(self, filename):
1935 try: 1936 memo_dict = self._memo['srcdir_find_file'] 1937 except KeyError: 1938 memo_dict = {} 1939 self._memo['srcdir_find_file'] = memo_dict 1940 else: 1941 try: 1942 return memo_dict[filename] 1943 except KeyError: 1944 pass 1945 1946 def func(node): 1947 if (isinstance(node, File) or isinstance(node, Entry)) and \ 1948 (node.is_derived() or node.exists()): 1949 return node 1950 return None
1951 1952 norm_name = _my_normcase(filename) 1953 1954 for rdir in self.get_all_rdirs(): 1955 try: node = rdir.entries[norm_name] 1956 except KeyError: node = rdir.file_on_disk(filename) 1957 else: node = func(node) 1958 if node: 1959 result = (node, self) 1960 memo_dict[filename] = result 1961 return result 1962 1963 for srcdir in self.srcdir_list(): 1964 for rdir in srcdir.get_all_rdirs(): 1965 try: node = rdir.entries[norm_name] 1966 except KeyError: node = rdir.file_on_disk(filename) 1967 else: node = func(node) 1968 if node: 1969 result = (File(filename, self, self.fs), srcdir) 1970 memo_dict[filename] = result 1971 return result 1972 1973 result = (None, None) 1974 memo_dict[filename] = result 1975 return result
1976
1977 - def dir_on_disk(self, name):
1978 if self.entry_exists_on_disk(name): 1979 try: return self.Dir(name) 1980 except TypeError: pass 1981 node = self.srcdir_duplicate(name) 1982 if isinstance(node, File): 1983 return None 1984 return node
1985
1986 - def file_on_disk(self, name):
1987 if self.entry_exists_on_disk(name) or \ 1988 diskcheck_rcs(self, name) or \ 1989 diskcheck_sccs(self, name): 1990 try: return self.File(name) 1991 except TypeError: pass 1992 node = self.srcdir_duplicate(name) 1993 if isinstance(node, Dir): 1994 return None 1995 return node
1996
1997 - def walk(self, func, arg):
1998 """ 1999 Walk this directory tree by calling the specified function 2000 for each directory in the tree. 2001 2002 This behaves like the os.path.walk() function, but for in-memory 2003 Node.FS.Dir objects. The function takes the same arguments as 2004 the functions passed to os.path.walk(): 2005 2006 func(arg, dirname, fnames) 2007 2008 Except that "dirname" will actually be the directory *Node*, 2009 not the string. The '.' and '..' entries are excluded from 2010 fnames. The fnames list may be modified in-place to filter the 2011 subdirectories visited or otherwise impose a specific order. 2012 The "arg" argument is always passed to func() and may be used 2013 in any way (or ignored, passing None is common). 2014 """ 2015 entries = self.entries 2016 names = list(entries.keys()) 2017 names.remove('.') 2018 names.remove('..') 2019 func(arg, self, names) 2020 for dirname in [n for n in names if isinstance(entries[n], Dir)]: 2021 entries[dirname].walk(func, arg)
2022
2023 - def glob(self, pathname, ondisk=True, source=False, strings=False, exclude=None):
2024 """ 2025 Returns a list of Nodes (or strings) matching a specified 2026 pathname pattern. 2027 2028 Pathname patterns follow UNIX shell semantics: * matches 2029 any-length strings of any characters, ? matches any character, 2030 and [] can enclose lists or ranges of characters. Matches do 2031 not span directory separators. 2032 2033 The matches take into account Repositories, returning local 2034 Nodes if a corresponding entry exists in a Repository (either 2035 an in-memory Node or something on disk). 2036 2037 By defafult, the glob() function matches entries that exist 2038 on-disk, in addition to in-memory Nodes. Setting the "ondisk" 2039 argument to False (or some other non-true value) causes the glob() 2040 function to only match in-memory Nodes. The default behavior is 2041 to return both the on-disk and in-memory Nodes. 2042 2043 The "source" argument, when true, specifies that corresponding 2044 source Nodes must be returned if you're globbing in a build 2045 directory (initialized with VariantDir()). The default behavior 2046 is to return Nodes local to the VariantDir(). 2047 2048 The "strings" argument, when true, returns the matches as strings, 2049 not Nodes. The strings are path names relative to this directory. 2050 2051 The "exclude" argument, if not None, must be a pattern or a list 2052 of patterns following the same UNIX shell semantics. 2053 Elements matching a least one pattern of this list will be excluded 2054 from the result. 2055 2056 The underlying algorithm is adapted from the glob.glob() function 2057 in the Python library (but heavily modified), and uses fnmatch() 2058 under the covers. 2059 """ 2060 dirname, basename = os.path.split(pathname) 2061 if not dirname: 2062 result = self._glob1(basename, ondisk, source, strings) 2063 else: 2064 if has_glob_magic(dirname): 2065 list = self.glob(dirname, ondisk, source, False, exclude) 2066 else: 2067 list = [self.Dir(dirname, create=True)] 2068 result = [] 2069 for dir in list: 2070 r = dir._glob1(basename, ondisk, source, strings) 2071 if strings: 2072 r = [os.path.join(str(dir), x) for x in r] 2073 result.extend(r) 2074 if exclude: 2075 result = filter(lambda x: not any(fnmatch.fnmatch(str(x), e) for e in SCons.Util.flatten(exclude)), result) 2076 return sorted(result, key=lambda a: str(a))
2077
2078 - def _glob1(self, pattern, ondisk=True, source=False, strings=False):
2079 """ 2080 Globs for and returns a list of entry names matching a single 2081 pattern in this directory. 2082 2083 This searches any repositories and source directories for 2084 corresponding entries and returns a Node (or string) relative 2085 to the current directory if an entry is found anywhere. 2086 2087 TODO: handle pattern with no wildcard 2088 """ 2089 search_dir_list = self.get_all_rdirs() 2090 for srcdir in self.srcdir_list(): 2091 search_dir_list.extend(srcdir.get_all_rdirs()) 2092 2093 selfEntry = self.Entry 2094 names = [] 2095 for dir in search_dir_list: 2096 # We use the .name attribute from the Node because the keys of 2097 # the dir.entries dictionary are normalized (that is, all upper 2098 # case) on case-insensitive systems like Windows. 2099 node_names = [ v.name for k, v in dir.entries.items() 2100 if k not in ('.', '..') ] 2101 names.extend(node_names) 2102 if not strings: 2103 # Make sure the working directory (self) actually has 2104 # entries for all Nodes in repositories or variant dirs. 2105 for name in node_names: selfEntry(name) 2106 if ondisk: 2107 try: 2108 disk_names = os.listdir(dir.abspath) 2109 except os.error: 2110 continue 2111 names.extend(disk_names) 2112 if not strings: 2113 # We're going to return corresponding Nodes in 2114 # the local directory, so we need to make sure 2115 # those Nodes exist. We only want to create 2116 # Nodes for the entries that will match the 2117 # specified pattern, though, which means we 2118 # need to filter the list here, even though 2119 # the overall list will also be filtered later, 2120 # after we exit this loop. 2121 if pattern[0] != '.': 2122 #disk_names = [ d for d in disk_names if d[0] != '.' ] 2123 disk_names = [x for x in disk_names if x[0] != '.'] 2124 disk_names = fnmatch.filter(disk_names, pattern) 2125 dirEntry = dir.Entry 2126 for name in disk_names: 2127 # Add './' before disk filename so that '#' at 2128 # beginning of filename isn't interpreted. 2129 name = './' + name 2130 node = dirEntry(name).disambiguate() 2131 n = selfEntry(name) 2132 if n.__class__ != node.__class__: 2133 n.__class__ = node.__class__ 2134 n._morph() 2135 2136 names = set(names) 2137 if pattern[0] != '.': 2138 names = [x for x in names if x[0] != '.'] 2139 names = fnmatch.filter(names, pattern) 2140 2141 if strings: 2142 return names 2143 2144 return [self.entries[_my_normcase(n)] for n in names]
2145
2146 -class RootDir(Dir):
2147 """A class for the root directory of a file system. 2148 2149 This is the same as a Dir class, except that the path separator 2150 ('/' or '\\') is actually part of the name, so we don't need to 2151 add a separator when creating the path names of entries within 2152 this directory. 2153 """
2154 - def __init__(self, drive, fs):
2155 if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.RootDir') 
2156 # We're going to be our own parent directory (".." entry and .dir 
2157 # attribute) so we have to set up some values so Base.__init__() 
2158 # won't gag when it calls some of our methods. 
2159 self.abspath = '' 
2160 self.labspath = '' 
2161 self.path = '' 
2162 self.tpath = '' 
2163 self.path_elements = [] 
2164 self.duplicate = 0 
2165 self.root = self 
2166  
2167 # Handle all the types of drives: 
2168 if drive == '': 
2169 # No drive, regular UNIX root or Windows default drive. 
2170 name = OS_SEP 
2171 dirname = OS_SEP 
2172 elif drive == '//': 
2173 # UNC path 
2174 name = UNC_PREFIX 
2175 dirname = UNC_PREFIX 
2176 else: 
2177 # Windows drive letter 
2178 name = drive 
2179 dirname = drive + OS_SEP 
2180  
2181 Base.__init__(self, name, self, fs) 
2182  
2183 # Now set our paths to what we really want them to be. The 
2184 # name should already contain any necessary separators, such 
2185 # as the initial drive letter (the name) plus the directory 
2186 # separator, except for the "lookup abspath," which does not 
2187 # have the drive letter. 
2188 self.abspath = dirname 
2189 self.labspath = '' 
2190 self.path = dirname 
2191 self.tpath = dirname 
2192 self._morph() 
2193  
2194 # Must be reset after Dir._morph() is invoked... 
2195 self.dirname = dirname 
2196  
2197 self._lookupDict = {} 
2198  
2199 self._lookupDict[''] = self 
2200 self._lookupDict['/'] = self 
2201  
2202 # The // entry is necessary because os.path.normpath() 
2203 # preserves double slashes at the beginning of a path on Posix 
2204 # platforms. 
2205 if not has_unc: 
2206 self._lookupDict['//'] = self
2207
2208 - def must_be_same(self, klass):
2209 if klass is Dir: 2210 return 2211 Base.must_be_same(self, klass)
2212
2213 - def _lookup_abs(self, p, klass, create=1):
2214 """ 2215 Fast (?) lookup of a *normalized* absolute path. 2216 2217 This method is intended for use by internal lookups with 2218 already-normalized path data. For general-purpose lookups, 2219 use the FS.Entry(), FS.Dir() or FS.File() methods. 2220 2221 The caller is responsible for making sure we're passed a 2222 normalized absolute path; we merely let Python's dictionary look 2223 up and return the One True Node.FS object for the path. 2224 2225 If a Node for the specified "p" doesn't already exist, and 2226 "create" is specified, the Node may be created after recursive 2227 invocation to find or create the parent directory or directories. 2228 """ 2229 k = _my_normcase(p) 2230 try: 2231 result = self._lookupDict[k] 2232 except KeyError: 2233 if not create: 2234 msg = "No such file or directory: '%s' in '%s' (and create is False)" % (p, str(self)) 2235 raise SCons.Errors.UserError(msg) 2236 # There is no Node for this path name, and we're allowed 2237 # to create it. 2238 dir_name, file_name = p.rsplit('/',1) 2239 dir_node = self._lookup_abs(dir_name, Dir) 2240 result = klass(file_name, dir_node, self.fs) 2241 2242 # Double-check on disk (as configured) that the Node we 2243 # created matches whatever is out there in the real world. 2244 result.diskcheck_match() 2245 2246 self._lookupDict[k] = result 2247 dir_node.entries[_my_normcase(file_name)] = result 2248 dir_node.implicit = None 2249 else: 2250 # There is already a Node for this path name. Allow it to 2251 # complain if we were looking for an inappropriate type. 2252 result.must_be_same(klass) 2253 return result
2254
2255 - def __str__(self):
2256 return self.abspath
2257
2258 - def entry_abspath(self, name):
2259 return self.abspath + name
2260
2261 - def entry_labspath(self, name):
2262 return '/' + name
2263
2264 - def entry_path(self, name):
2265 return self.path + name
2266
2267 - def entry_tpath(self, name):
2268 return self.tpath + name
2269
2270 - def is_under(self, dir):
2271 if self is dir: 2272 return 1 2273 else: 2274 return 0
2275
2276 - def up(self):
2277 return None
2278
2279 - def get_dir(self):
2280 return None
2281
2282 - def src_builder(self):
2283 return _null
2284
2285 -class FileNodeInfo(SCons.Node.NodeInfoBase):
2286 current_version_id = 1 2287 2288 field_list = ['csig', 'timestamp', 'size'] 2289 2290 # This should get reset by the FS initialization. 2291 fs = None 2292
2293 - def str_to_node(self, s):
2294 top = self.fs.Top 2295 root = top.root 2296 if do_splitdrive: 2297 drive, s = _my_splitdrive(s) 2298 if drive: 2299 root = self.fs.get_root(drive) 2300 if not os.path.isabs(s): 2301 s = top.labspath + '/' + s 2302 return root._lookup_abs(s, Entry)
2303
2304 -class FileBuildInfo(SCons.Node.BuildInfoBase):
2305 current_version_id = 1 2306
2307 - def convert_to_sconsign(self):
2308 """ 2309 Converts this FileBuildInfo object for writing to a .sconsign file 2310 2311 This replaces each Node in our various dependency lists with its 2312 usual string representation: relative to the top-level SConstruct 2313 directory, or an absolute path if it's outside. 2314 """ 2315 if os_sep_is_slash: 2316 node_to_str = str 2317 else: 2318 def node_to_str(n): 2319 try: 2320 s = n.path 2321 except AttributeError: 2322 s = str(n) 2323 else: 2324 s = s.replace(OS_SEP, '/') 2325 return s
2326 for attr in ['bsources', 'bdepends', 'bimplicit']: 2327 try: 2328 val = getattr(self, attr) 2329 except AttributeError: 2330 pass 2331 else: 2332 setattr(self, attr, list(map(node_to_str, val)))
2333 - def convert_from_sconsign(self, dir, name):
2334 """ 2335 Converts a newly-read FileBuildInfo object for in-SCons use 2336 2337 For normal up-to-date checking, we don't have any conversion to 2338 perform--but we're leaving this method here to make that clear. 2339 """ 2340 pass
2341 - def prepare_dependencies(self):
2342 """ 2343 Prepares a FileBuildInfo object for explaining what changed 2344 2345 The bsources, bdepends and bimplicit lists have all been 2346 stored on disk as paths relative to the top-level SConstruct 2347 directory. Convert the strings to actual Nodes (for use by the 2348 --debug=explain code and --implicit-cache). 2349 """ 2350 attrs = [ 2351 ('bsources', 'bsourcesigs'), 2352 ('bdepends', 'bdependsigs'), 2353 ('bimplicit', 'bimplicitsigs'), 2354 ] 2355 for (nattr, sattr) in attrs: 2356 try: 2357 strings = getattr(self, nattr) 2358 nodeinfos = getattr(self, sattr) 2359 except AttributeError: 2360 continue 2361 nodes = [] 2362 for s, ni in zip(strings, nodeinfos): 2363 if not isinstance(s, SCons.Node.Node): 2364 s = ni.str_to_node(s) 2365 nodes.append(s) 2366 setattr(self, nattr, nodes)
2367 - def format(self, names=0):
2368 result = [] 2369 bkids = self.bsources + self.bdepends + self.bimplicit 2370 bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs 2371 for bkid, bkidsig in zip(bkids, bkidsigs): 2372 result.append(str(bkid) + ': ' + 2373 ' '.join(bkidsig.format(names=names))) 2374 result.append('%s [%s]' % (self.bactsig, self.bact)) 2375 return '\n'.join(result)
2376
2377 -class File(Base):
2378 """A class for files in a file system. 2379 """ 2380 2381 memoizer_counters = [] 2382 2383 NodeInfo = FileNodeInfo 2384 BuildInfo = FileBuildInfo 2385 2386 md5_chunksize = 64 2387
2388 - def diskcheck_match(self):
2389 diskcheck_match(self, self.isdir, 2390 "Directory %s found where file expected.")
2391
2392 - def __init__(self, name, directory, fs):
2393 if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.File') 2394 Base.__init__(self, name, directory, fs) 2395 self._morph()
2396
2397 - def Entry(self, name):
2398 """Create an entry node named 'name' relative to 2399 the directory of this file.""" 2400 return self.dir.Entry(name)
2401
2402 - def Dir(self, name, create=True):
2403 """Create a directory node named 'name' relative to 2404 the directory of this file.""" 2405 return self.dir.Dir(name, create=create)
2406
2407 - def Dirs(self, pathlist):
2408 """Create a list of directories relative to the SConscript 2409 directory of this file.""" 2410 return [self.Dir(p) for p in pathlist]
2411
2412 - def File(self, name):
2413 """Create a file node named 'name' relative to 2414 the directory of this file.""" 2415 return self.dir.File(name)
2416 2417 #def generate_build_dict(self): 2418 # """Return an appropriate dictionary of values for building 2419 # this File.""" 2420 # return {'Dir' : self.Dir, 2421 # 'File' : self.File, 2422 # 'RDirs' : self.RDirs} 2423
2424 - def _morph(self):
2425 """Turn a file system node into a File object.""" 2426 self.scanner_paths = {} 2427 if not hasattr(self, '_local'): 2428 self._local = 0 2429 if not hasattr(self, 'released_target_info'): 2430 self.released_target_info = False 2431 2432 # If there was already a Builder set on this entry, then 2433 # we need to make sure we call the target-decider function, 2434 # not the source-decider. Reaching in and doing this by hand 2435 # is a little bogus. We'd prefer to handle this by adding 2436 # an Entry.builder_set() method that disambiguates like the 2437 # other methods, but that starts running into problems with the 2438 # fragile way we initialize Dir Nodes with their Mkdir builders, 2439 # yet still allow them to be overridden by the user. Since it's 2440 # not clear right now how to fix that, stick with what works 2441 # until it becomes clear... 2442 if self.has_builder(): 2443 self.changed_since_last_build = self.decide_target
2444
2445 - def scanner_key(self):
2446 return self.get_suffix()
2447
2448 - def get_contents(self):
2449 if not self.rexists(): 2450 return '' 2451 fname = self.rfile().abspath 2452 try: 2453 contents = open(fname, "rb").read() 2454 except EnvironmentError, e: 2455 if not e.filename: 2456 e.filename = fname 2457 raise 2458 return contents
2459 2460 # This attempts to figure out what the encoding of the text is 2461 # based upon the BOM bytes, and then decodes the contents so that 2462 # it's a valid python string.
2463 - def get_text_contents(self):
2464 contents = self.get_contents() 2465 # The behavior of various decode() methods and functions 2466 # w.r.t. the initial BOM bytes is different for different 2467 # encodings and/or Python versions. ('utf-8' does not strip 2468 # them, but has a 'utf-8-sig' which does; 'utf-16' seems to 2469 # strip them; etc.) Just sidestep all the complication by 2470 # explicitly stripping the BOM before we decode(). 2471 if contents.startswith(codecs.BOM_UTF8): 2472 return contents[len(codecs.BOM_UTF8):].decode('utf-8') 2473 if contents.startswith(codecs.BOM_UTF16_LE): 2474 return contents[len(codecs.BOM_UTF16_LE):].decode('utf-16-le') 2475 if contents.startswith(codecs.BOM_UTF16_BE): 2476 return contents[len(codecs.BOM_UTF16_BE):].decode('utf-16-be') 2477 return contents
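get_text_contents() sidesteps the codecs' inconsistent treatment of byte-order marks by stripping a UTF-8 or UTF-16 BOM itself before decoding. The same technique in isolation (decode_with_bom is a hypothetical helper; defaulting to UTF-8 when no BOM is present is an assumption of this sketch, whereas the method above returns the raw contents in that case):

import codecs

def decode_with_bom(data):
    # Decode bytes to text, honoring a leading UTF-8/UTF-16 BOM if present.
    if data.startswith(codecs.BOM_UTF8):
        return data[len(codecs.BOM_UTF8):].decode('utf-8')
    if data.startswith(codecs.BOM_UTF16_LE):
        return data[len(codecs.BOM_UTF16_LE):].decode('utf-16-le')
    if data.startswith(codecs.BOM_UTF16_BE):
        return data[len(codecs.BOM_UTF16_BE):].decode('utf-16-be')
    return data.decode('utf-8')   # assumption: treat BOM-less input as UTF-8

print(decode_with_bom(codecs.BOM_UTF8 + b'hello'))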
2478
2479 - def get_content_hash(self):
2480 """ 2481 Compute and return the MD5 hash for this file. 2482 """ 2483 if not self.rexists(): 2484 return SCons.Util.MD5signature('') 2485 fname = self.rfile().abspath 2486 try: 2487 cs = SCons.Util.MD5filesignature(fname, 2488 chunksize=SCons.Node.FS.File.md5_chunksize*1024) 2489 except EnvironmentError, e: 2490 if not e.filename: 2491 e.filename = fname 2492 raise 2493 return cs
2494 2495 2496 memoizer_counters.append(SCons.Memoize.CountValue('get_size')) 2497
2498 - def get_size(self):
2499 try: 2500 return self._memo['get_size'] 2501 except KeyError: 2502 pass 2503 2504 if self.rexists(): 2505 size = self.rfile().getsize() 2506 else: 2507 size = 0 2508 2509 self._memo['get_size'] = size 2510 2511 return size
2512 2513 memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp')) 2514
2515 - def get_timestamp(self):
2516 try: 2517 return self._memo['get_timestamp'] 2518 except KeyError: 2519 pass 2520 2521 if self.rexists(): 2522 timestamp = self.rfile().getmtime() 2523 else: 2524 timestamp = 0 2525 2526 self._memo['get_timestamp'] = timestamp 2527 2528 return timestamp
2529
2530 - def store_info(self):
2531 # Merge our build information into the already-stored entry. 
2532 # This accommodates "chained builds" where a file that's a target 
2533 # in one build (SConstruct file) is a source in a different build. 
2534 # See test/chained-build.py for the use case. 
2535 if do_store_info: 
2536 self.dir.sconsign().store_info(self.name, self)
2537 2538 convert_copy_attrs = [ 2539 'bsources', 2540 'bimplicit', 2541 'bdepends', 2542 'bact', 2543 'bactsig', 2544 'ninfo', 2545 ] 2546 2547 2548 convert_sig_attrs = [ 2549 'bsourcesigs', 2550 'bimplicitsigs', 2551 'bdependsigs', 2552 ] 2553
2554 - def convert_old_entry(self, old_entry):
2555 # Convert a .sconsign entry from before the Big Signature 2556 # Refactoring, doing what we can to convert its information 2557 # to the new .sconsign entry format. 2558 # 2559 # The old format looked essentially like this: 2560 # 2561 # BuildInfo 2562 # .ninfo (NodeInfo) 2563 # .bsig 2564 # .csig 2565 # .timestamp 2566 # .size 2567 # .bsources 2568 # .bsourcesigs ("signature" list) 2569 # .bdepends 2570 # .bdependsigs ("signature" list) 2571 # .bimplicit 2572 # .bimplicitsigs ("signature" list) 2573 # .bact 2574 # .bactsig 2575 # 2576 # The new format looks like this: 2577 # 2578 # .ninfo (NodeInfo) 2579 # .bsig 2580 # .csig 2581 # .timestamp 2582 # .size 2583 # .binfo (BuildInfo) 2584 # .bsources 2585 # .bsourcesigs (NodeInfo list) 2586 # .bsig 2587 # .csig 2588 # .timestamp 2589 # .size 2590 # .bdepends 2591 # .bdependsigs (NodeInfo list) 2592 # .bsig 2593 # .csig 2594 # .timestamp 2595 # .size 2596 # .bimplicit 2597 # .bimplicitsigs (NodeInfo list) 2598 # .bsig 2599 # .csig 2600 # .timestamp 2601 # .size 2602 # .bact 2603 # .bactsig 2604 # 2605 # The basic idea of the new structure is that a NodeInfo always 2606 # holds all available information about the state of a given Node 2607 # at a certain point in time. The various .b*sigs lists can just 2608 # be a list of pointers to the .ninfo attributes of the different 2609 # dependent nodes, without any copying of information until it's 2610 # time to pickle it for writing out to a .sconsign file. 2611 # 2612 # The complicating issue is that the *old* format only stored one 2613 # "signature" per dependency, based on however the *last* build 2614 # was configured. We don't know from just looking at it whether 2615 # it was a build signature, a content signature, or a timestamp 2616 # "signature". Since we no longer use build signatures, the 2617 # best we can do is look at the length and if it's thirty two, 2618 # assume that it was (or might have been) a content signature. 2619 # If it was actually a build signature, then it will cause a 2620 # rebuild anyway when it doesn't match the new content signature, 2621 # but that's probably the best we can do. 2622 import SCons.SConsign 2623 new_entry = SCons.SConsign.SConsignEntry() 2624 new_entry.binfo = self.new_binfo() 2625 binfo = new_entry.binfo 2626 for attr in self.convert_copy_attrs: 2627 try: 2628 value = getattr(old_entry, attr) 2629 except AttributeError: 2630 continue 2631 setattr(binfo, attr, value) 2632 delattr(old_entry, attr) 2633 for attr in self.convert_sig_attrs: 2634 try: 2635 sig_list = getattr(old_entry, attr) 2636 except AttributeError: 2637 continue 2638 value = [] 2639 for sig in sig_list: 2640 ninfo = self.new_ninfo() 2641 if len(sig) == 32: 2642 ninfo.csig = sig 2643 else: 2644 ninfo.timestamp = sig 2645 value.append(ninfo) 2646 setattr(binfo, attr, value) 2647 delattr(old_entry, attr) 2648 return new_entry
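The conversion above has to guess what each old-format "signature" was; since build signatures are no longer used, a 32-character value is assumed to be an MD5 content signature and anything else is treated as a timestamp. That heuristic in isolation (classify_old_sig is a hypothetical helper):

def classify_old_sig(sig):
    # Guess whether an old-format signature was a content hash or a timestamp.
    if isinstance(sig, str) and len(sig) == 32:
        return ('csig', sig)          # looks like an MD5 hex digest
    return ('timestamp', sig)         # otherwise assume a timestamp value

print(classify_old_sig('d41d8cd98f00b204e9800998ecf8427e'))   # ('csig', ...)
print(classify_old_sig(1438353370))                           # ('timestamp', ...)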
2649 2650 memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info')) 2651
2652 - def get_stored_info(self):
2653 try: 2654 return self._memo['get_stored_info'] 2655 except KeyError: 2656 pass 2657 2658 try: 2659 sconsign_entry = self.dir.sconsign().get_entry(self.name) 2660 except (KeyError, EnvironmentError): 2661 import SCons.SConsign 2662 sconsign_entry = SCons.SConsign.SConsignEntry() 2663 sconsign_entry.binfo = self.new_binfo() 2664 sconsign_entry.ninfo = self.new_ninfo() 2665 else: 2666 if isinstance(sconsign_entry, FileBuildInfo): 2667 # This is a .sconsign file from before the Big Signature 2668 # Refactoring; convert it as best we can. 2669 sconsign_entry = self.convert_old_entry(sconsign_entry) 2670 try: 2671 delattr(sconsign_entry.ninfo, 'bsig') 2672 except AttributeError: 2673 pass 2674 2675 self._memo['get_stored_info'] = sconsign_entry 2676 2677 return sconsign_entry
2678
2679 - def get_stored_implicit(self):
2680 binfo = self.get_stored_info().binfo 2681 binfo.prepare_dependencies() 2682 try: return binfo.bimplicit 2683 except AttributeError: return None
2684
2685 - def rel_path(self, other):
2686 return self.dir.rel_path(other)
2687
2688 - def _get_found_includes_key(self, env, scanner, path):
2689 return (id(env), id(scanner), path)
2690 2691 memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key)) 2692
2693 - def get_found_includes(self, env, scanner, path):
2694 """Return the included implicit dependencies in this file. 2695 Cache results so we only scan the file once per path 2696 regardless of how many times this information is requested. 2697 """ 2698 memo_key = (id(env), id(scanner), path) 2699 try: 2700 memo_dict = self._memo['get_found_includes'] 2701 except KeyError: 2702 memo_dict = {} 2703 self._memo['get_found_includes'] = memo_dict 2704 else: 2705 try: 2706 return memo_dict[memo_key] 2707 except KeyError: 2708 pass 2709 2710 if scanner: 2711 # result = [n.disambiguate() for n in scanner(self, env, path)] 2712 result = scanner(self, env, path) 2713 result = [N.disambiguate() for N in result] 2714 else: 2715 result = [] 2716 2717 memo_dict[memo_key] = result 2718 2719 return result
2720
2721 - def _createDir(self):
2722 # ensure that the directories for this node are 2723 # created. 2724 self.dir._create()
2725
2726 - def push_to_cache(self):
2727 """Try to push the node into a cache 2728 """ 2729 # This should get called before the Nodes' .built() method is 2730 # called, which would clear the build signature if the file has 2731 # a source scanner. 2732 # 2733 # We have to clear the local memoized values *before* we push 2734 # the node to cache so that the memoization of the self.exists() 2735 # return value doesn't interfere. 2736 if self.nocache: 2737 return 2738 self.clear_memoized_values() 2739 if self.exists(): 2740 self.get_build_env().get_CacheDir().push(self)
2741
2742 - def retrieve_from_cache(self):
2743 """Try to retrieve the node's content from a cache 2744 2745 This method is called from multiple threads in a parallel build, 2746 so only do thread safe stuff here. Do thread unsafe stuff in 2747 built(). 2748 2749 Returns true if the node was successfully retrieved. 2750 """ 2751 if self.nocache: 2752 return None 2753 if not self.is_derived(): 2754 return None 2755 return self.get_build_env().get_CacheDir().retrieve(self)
2756
2757 - def visited(self):
2758 if self.exists() and self.executor is not None: 2759 self.get_build_env().get_CacheDir().push_if_forced(self) 2760 2761 ninfo = self.get_ninfo() 2762 2763 csig = self.get_max_drift_csig() 2764 if csig: 2765 ninfo.csig = csig 2766 2767 ninfo.timestamp = self.get_timestamp() 2768 ninfo.size = self.get_size() 2769 2770 if not self.has_builder(): 2771 # This is a source file, but it might have been a target file 2772 # in another build that included more of the DAG. Copy 2773 # any build information that's stored in the .sconsign file 2774 # into our binfo object so it doesn't get lost. 2775 old = self.get_stored_info() 2776 self.get_binfo().__dict__.update(old.binfo.__dict__) 2777 2778 self.store_info()
2779
2780 - def release_target_info(self):
2781 """Called just after this node has been marked 2782 up-to-date or was built completely. 2783 2784 This is where we try to release as many target node infos 2785 as possible for clean builds and update runs, in order 2786 to minimize the overall memory consumption. 2787 2788 We'd like to remove a lot more attributes like self.sources 2789 and self.sources_set, but they might get used 2790 in a next build step. For example, during configuration 2791 the source files for a built *.o file are used to figure out 2792 which linker to use for the resulting Program (gcc vs. g++)! 2793 That's why we check for the 'keep_targetinfo' attribute, 2794 config Nodes and the Interactive mode just don't allow 2795 an early release of most variables. 2796 2797 In the same manner, we can't simply remove the self.attributes 2798 here. The smart linking relies on the shared flag, and some 2799 parts of the java Tool use it to transport information 2800 about nodes... 2801 2802 @see: built() and Node.release_target_info() 2803 """ 2804 if (self.released_target_info or SCons.Node.interactive): 2805 return 2806 2807 if not hasattr(self.attributes, 'keep_targetinfo'): 2808 # Cache some required values, before releasing 2809 # stuff like env, executor and builder... 2810 self.changed(allowcache=True) 2811 self.get_contents_sig() 2812 self.get_build_env() 2813 # Now purge unneeded stuff to free memory... 2814 self.executor = None 2815 self._memo.pop('rfile', None) 2816 self.prerequisites = None 2817 # Cleanup lists, but only if they're empty 2818 if not len(self.ignore_set): 2819 self.ignore_set = None 2820 if not len(self.implicit_set): 2821 self.implicit_set = None 2822 if not len(self.depends_set): 2823 self.depends_set = None 2824 if not len(self.ignore): 2825 self.ignore = None 2826 if not len(self.depends): 2827 self.depends = None 2828 # Mark this node as done, we only have to release 2829 # the memory once... 2830 self.released_target_info = True
2831
2832 - def find_src_builder(self):
2833 if self.rexists(): 2834 return None 2835 scb = self.dir.src_builder() 2836 if scb is _null: 2837 if diskcheck_sccs(self.dir, self.name): 2838 scb = get_DefaultSCCSBuilder() 2839 elif diskcheck_rcs(self.dir, self.name): 2840 scb = get_DefaultRCSBuilder() 2841 else: 2842 scb = None 2843 if scb is not None: 2844 try: 2845 b = self.builder 2846 except AttributeError: 2847 b = None 2848 if b is None: 2849 self.builder_set(scb) 2850 return scb
2851
2852 - def has_src_builder(self):
2853 """Return whether this Node has a source builder or not. 2854 2855 If this Node doesn't have an explicit source code builder, this 2856 is where we figure out, on the fly, if there's a transparent 2857 source code builder for it. 2858 2859 Note that if we found a source builder, we also set the 2860 self.builder attribute, so that all of the methods that actually 2861 *build* this file don't have to do anything different. 2862 """ 2863 try: 2864 scb = self.sbuilder 2865 except AttributeError: 2866 scb = self.sbuilder = self.find_src_builder() 2867 return scb is not None
2868
2869 - def alter_targets(self):
2870 """Return any corresponding targets in a variant directory. 2871 """ 2872 if self.is_derived(): 2873 return [], None 2874 return self.fs.variant_dir_target_climb(self, self.dir, [self.name])
2875
2876 - def _rmv_existing(self):
2877 self.clear_memoized_values() 2878 if print_duplicate: 2879 print "dup: removing existing target %s"%self 2880 e = Unlink(self, [], None) 2881 if isinstance(e, SCons.Errors.BuildError): 2882 raise e
2883 2884 # 2885 # Taskmaster interface subsystem 2886 # 2887
2888 - def make_ready(self):
2889 self.has_src_builder() 2890 self.get_binfo()
2891
2892 - def prepare(self):
2893 """Prepare for this file to be created.""" 2894 SCons.Node.Node.prepare(self) 2895 2896 if self.get_state() != SCons.Node.up_to_date: 2897 if self.exists(): 2898 if self.is_derived() and not self.precious: 2899 self._rmv_existing() 2900 else: 2901 try: 2902 self._createDir() 2903 except SCons.Errors.StopError, drive: 2904 desc = "No drive `%s' for target `%s'." % (drive, self) 2905 raise SCons.Errors.StopError(desc)
2906 2907 # 2908 # 2909 # 2910
2911 - def remove(self):
2912 """Remove this file.""" 2913 if self.exists() or self.islink(): 2914 self.fs.unlink(self.path) 2915 return 1 2916 return None
2917
2918 - def do_duplicate(self, src):
2919 self._createDir() 2920 if print_duplicate: 2921 print "dup: relinking variant '%s' from '%s'"%(self, src) 2922 Unlink(self, None, None) 2923 e = Link(self, src, None) 2924 if isinstance(e, SCons.Errors.BuildError): 2925 desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr) 2926 raise SCons.Errors.StopError(desc) 2927 self.linked = 1 2928 # The Link() action may or may not have actually 2929 # created the file, depending on whether the -n 2930 # option was used or not. Delete the _exists and 2931 # _rexists attributes so they can be reevaluated. 2932 self.clear()
2933 2934 memoizer_counters.append(SCons.Memoize.CountValue('exists')) 2935
2936 - def exists(self):
2937 try: 2938 return self._memo['exists'] 2939 except KeyError: 2940 pass 2941 # Duplicate from source path if we are set up to do this. 2942 if self.duplicate and not self.is_derived() and not self.linked: 2943 src = self.srcnode() 2944 if src is not self: 2945 # At this point, src is meant to be copied in a variant directory. 2946 src = src.rfile() 2947 if src.abspath != self.abspath: 2948 if src.exists(): 2949 self.do_duplicate(src) 2950 # Can't return 1 here because the duplication might 2951 # not actually occur if the -n option is being used. 2952 else: 2953 # The source file does not exist. Make sure no old 2954 # copy remains in the variant directory. 2955 if print_duplicate: 2956 print "dup: no src for %s, unlinking old variant copy"%self 2957 if Base.exists(self) or self.islink(): 2958 self.fs.unlink(self.path) 2959 # Return None explicitly because the Base.exists() call 2960 # above will have cached its value if the file existed. 2961 self._memo['exists'] = None 2962 return None 2963 result = Base.exists(self) 2964 self._memo['exists'] = result 2965 return result
2966 2967 # 2968 # SIGNATURE SUBSYSTEM 2969 # 2970
2971 - def get_max_drift_csig(self):
2972 """ 2973 Returns the content signature currently stored for this node 2974 if it's been unmodified longer than the max_drift value, or the 2975 max_drift value is 0. Returns None otherwise. 2976 """ 2977 old = self.get_stored_info() 2978 mtime = self.get_timestamp() 2979 2980 max_drift = self.fs.max_drift 2981 if max_drift > 0: 2982 if (time.time() - mtime) > max_drift: 2983 try: 2984 n = old.ninfo 2985 if n.timestamp and n.csig and n.timestamp == mtime: 2986 return n.csig 2987 except AttributeError: 2988 pass 2989 elif max_drift == 0: 2990 try: 2991 return old.ninfo.csig 2992 except AttributeError: 2993 pass 2994 2995 return None
2996
2997 - def get_csig(self):
2998 """ 2999 Generate a node's content signature, the digested signature 3000 of its content. 3001 3002 node - the node 3003 cache - alternate node to use for the signature cache 3004 returns - the content signature 3005 """ 3006 ninfo = self.get_ninfo() 3007 try: 3008 return ninfo.csig 3009 except AttributeError: 3010 pass 3011 3012 csig = self.get_max_drift_csig() 3013 if csig is None: 3014 3015 try: 3016 if self.get_size() < SCons.Node.FS.File.md5_chunksize: 3017 contents = self.get_contents() 3018 else: 3019 csig = self.get_content_hash() 3020 except IOError: 3021 # This can happen if there's actually a directory on-disk, 3022 # which can be the case if they've disabled disk checks, 3023 # or if an action with a File target actually happens to 3024 # create a same-named directory by mistake. 3025 csig = '' 3026 else: 3027 if not csig: 3028 csig = SCons.Util.MD5signature(contents) 3029 3030 ninfo.csig = csig 3031 3032 return csig
3033 3034 # 3035 # DECISION SUBSYSTEM 3036 # 3037
3038 - def builder_set(self, builder):
3039 SCons.Node.Node.builder_set(self, builder) 3040 self.changed_since_last_build = self.decide_target
3041
3042 - def built(self):
3043 """Called just after this File node is successfully built. 3044 3045 Just like for 'release_target_info' we try to release 3046 some more target node attributes in order to minimize the 3047 overall memory consumption. 3048 3049 @see: release_target_info 3050 """ 3051 3052 SCons.Node.Node.built(self) 3053 3054 if (not SCons.Node.interactive and 3055 not hasattr(self.attributes, 'keep_targetinfo')): 3056 # Ensure that the build infos get computed and cached... 3057 self.store_info() 3058 # ... then release some more variables. 3059 self._specific_sources = False 3060 self.labspath = None 3061 self._save_str() 3062 self.cwd = None 3063 3064 self.scanner_paths = None
3065
3066 - def changed(self, node=None, allowcache=False):
3067 """ 3068 Returns if the node is up-to-date with respect to the BuildInfo 3069 stored last time it was built. 3070 3071 For File nodes this is basically a wrapper around Node.changed(), 3072 but we allow the return value to get cached after the reference 3073 to the Executor got released in release_target_info(). 3074 3075 @see: Node.changed() 3076 """ 3077 if node is None: 3078 try: 3079 return self._memo['changed'] 3080 except KeyError: 3081 pass 3082 3083 has_changed = SCons.Node.Node.changed(self, node) 3084 if allowcache: 3085 self._memo['changed'] = has_changed 3086 return has_changed
3087
3088 - def changed_content(self, target, prev_ni):
3089 cur_csig = self.get_csig() 3090 try: 3091 return cur_csig != prev_ni.csig 3092 except AttributeError: 3093 return 1
3094
3095 - def changed_state(self, target, prev_ni):
3096 return self.state != SCons.Node.up_to_date
3097
3098 - def changed_timestamp_then_content(self, target, prev_ni):
3099 if not self.changed_timestamp_match(target, prev_ni): 3100 try: 3101 self.get_ninfo().csig = prev_ni.csig 3102 except AttributeError: 3103 pass 3104 return False 3105 return self.changed_content(target, prev_ni)
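changed_timestamp_then_content() is the hybrid decider: an unchanged timestamp means the file is treated as unchanged (and the previously stored csig is carried forward), while only a differing timestamp triggers the fallback comparison of content signatures. A reduced sketch over plain values rather than Node/NodeInfo objects (the helper and its arguments are hypothetical):

def timestamp_then_content_changed(cur_mtime, cur_csig_func, prev):
    # prev: dict with 'timestamp' and 'csig' recorded at the last build.
    # Returns (changed, csig); the content hash is only computed when the
    # cheap timestamp check says the file might have changed.
    if cur_mtime == prev.get('timestamp'):
        return (False, prev.get('csig'))      # trust the cached signature
    cur_csig = cur_csig_func()                # expensive: hash the content
    return (cur_csig != prev.get('csig'), cur_csig)

# Example: the timestamp moved but the content hash is identical.
changed, csig = timestamp_then_content_changed(
    1438353370,
    lambda: 'd41d8cd98f00b204e9800998ecf8427e',
    {'timestamp': 1438353300, 'csig': 'd41d8cd98f00b204e9800998ecf8427e'})
print(changed)    # False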