
Source Code for Module SCons.Node.FS

   1  """scons.Node.FS 
   2   
   3  File system nodes. 
   4   
   5  These Nodes represent the canonical external objects that people think 
   6  of when they think of building software: files and directories. 
   7   
   8  This holds a "default_fs" variable that should be initialized with an FS 
   9  that can be used by scripts or modules looking for the canonical default. 
  10   
  11  """ 
  12   
  13  # 
  14  # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation 
  15  # 
  16  # Permission is hereby granted, free of charge, to any person obtaining 
  17  # a copy of this software and associated documentation files (the 
  18  # "Software"), to deal in the Software without restriction, including 
  19  # without limitation the rights to use, copy, modify, merge, publish, 
  20  # distribute, sublicense, and/or sell copies of the Software, and to 
  21  # permit persons to whom the Software is furnished to do so, subject to 
  22  # the following conditions: 
  23  # 
  24  # The above copyright notice and this permission notice shall be included 
  25  # in all copies or substantial portions of the Software. 
  26  # 
  27  # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY 
  28  # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 
  29  # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
  30  # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 
  31  # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 
  32  # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 
  33  # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
  34  # 
  35   
  36  __revision__ = "src/engine/SCons/Node/FS.py 3842 2008/12/20 22:59:52 scons" 
  37   
  38  import fnmatch 
  39  from itertools import izip 
  40  import os 
  41  import os.path 
  42  import re 
  43  import shutil 
  44  import stat 
  45  import string 
  46  import sys 
  47  import time 
  48  import cStringIO 
  49   
  50  import SCons.Action 
  51  from SCons.Debug import logInstanceCreation 
  52  import SCons.Errors 
  53  import SCons.Memoize 
  54  import SCons.Node 
  55  import SCons.Node.Alias 
  56  import SCons.Subst 
  57  import SCons.Util 
  58  import SCons.Warnings 
  59   
  60  from SCons.Debug import Trace 
  61   
  62  do_store_info = True 
  63   
  64   
65 -class EntryProxyAttributeError(AttributeError):
66 """ 67 An AttributeError subclass for recording and displaying the name 68 of the underlying Entry involved in an AttributeError exception. 69 """
70 - def __init__(self, entry_proxy, attribute):
71 AttributeError.__init__(self) 72 self.entry_proxy = entry_proxy 73 self.attribute = attribute
74 - def __str__(self):
75 entry = self.entry_proxy.get() 76 fmt = "%s instance %s has no attribute %s" 77 return fmt % (entry.__class__.__name__, 78 repr(entry.name), 79 repr(self.attribute))
80 81 # The max_drift value: by default, use a cached signature value for 82 # any file that's been untouched for more than two days. 83 default_max_drift = 2*24*60*60 84 85 # 86 # We stringify these file system Nodes a lot. Turning a file system Node 87 # into a string is non-trivial, because the final string representation 88 # can depend on a lot of factors: whether it's a derived target or not, 89 # whether it's linked to a repository or source directory, and whether 90 # there's duplication going on. The normal technique for optimizing 91 # calculations like this is to memoize (cache) the string value, so you 92 # only have to do the calculation once. 93 # 94 # A number of the above factors, however, can be set after we've already 95 # been asked to return a string for a Node, because a Repository() or 96 # VariantDir() call or the like may not occur until later in SConscript 97 # files. So this variable controls whether we bother trying to save 98 # string values for Nodes. The wrapper interface can set this whenever 99 # they're done mucking with Repository and VariantDir and the other stuff, 100 # to let this module know it can start returning saved string values 101 # for Nodes. 102 # 103 Save_Strings = None 104
105 -def save_strings(val):
106 global Save_Strings 107 Save_Strings = val
108 109 # 110 # Avoid unnecessary function calls by recording a Boolean value that 111 # tells us whether or not os.path.splitdrive() actually does anything 112 # on this system, and therefore whether we need to bother calling it 113 # when looking up path names in various methods below. 114 # 115 116 do_splitdrive = None 117
118 -def initialize_do_splitdrive():
119 global do_splitdrive 120 drive, path = os.path.splitdrive('X:/foo') 121 do_splitdrive = not not drive
122 123 initialize_do_splitdrive() 124 125 # 126 127 needs_normpath_check = None 128
129 -def initialize_normpath_check():
130 """ 131 Initialize the normpath_check regular expression. 132 133 This function is used by the unit tests to re-initialize the pattern 134 when testing for behavior with different values of os.sep. 135 """ 136 global needs_normpath_check 137 if os.sep == '/': 138 pattern = r'.*/|\.$|\.\.$' 139 else: 140 pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep) 141 needs_normpath_check = re.compile(pattern)
  142   
  143  initialize_normpath_check() 
  144   
  145  # 
  146  # SCons.Action objects for interacting with the outside world. 
  147  # 
  148  # The Node.FS methods in this module should use these actions to 
  149  # create and/or remove files and directories; they should *not* use 
  150  # os.{link,symlink,unlink,mkdir}(), etc., directly. 
  151  # 
  152  # Using these SCons.Action objects ensures that descriptions of these 
  153  # external activities are properly displayed, that the displays are 
  154  # suppressed when the -s (silent) option is used, and (most importantly) 
  155  # the actions are disabled when the -n option is used, in which case 
  156  # there should be *no* changes to the external file system(s)... 
  157  # 
  158   
  159  if hasattr(os, 'link'): 
  172  else: 
  173      _hardlink_func = None 
  174   
  175  if hasattr(os, 'symlink'): 
  178  else: 
  179      _softlink_func = None 
  180  
181 -def _copy_func(fs, src, dest):
182 shutil.copy2(src, dest) 183 st = fs.stat(src) 184 fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
185 186 187 Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy', 188 'hard-copy', 'soft-copy', 'copy'] 189 190 Link_Funcs = [] # contains the callables of the specified duplication style 191
192 -def set_duplicate(duplicate):
193 # Fill in the Link_Funcs list according to the argument 194 # (discarding those not available on the platform). 195 196 # Set up the dictionary that maps the argument names to the 197 # underlying implementations. We do this inside this function, 198 # not in the top-level module code, so that we can remap os.link 199 # and os.symlink for testing purposes. 200 link_dict = { 201 'hard' : _hardlink_func, 202 'soft' : _softlink_func, 203 'copy' : _copy_func 204 } 205 206 if not duplicate in Valid_Duplicates: 207 raise SCons.Errors.InternalError, ("The argument of set_duplicate " 208 "should be in Valid_Duplicates") 209 global Link_Funcs 210 Link_Funcs = [] 211 for func in string.split(duplicate,'-'): 212 if link_dict[func]: 213 Link_Funcs.append(link_dict[func])
214
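# Illustrative sketch of how the duplication styles map to Link_Funcs:
#
#     set_duplicate('soft-copy')
#     # With os.symlink available, Link_Funcs == [_softlink_func, _copy_func];
#     # without it, Link_Funcs == [_copy_func].
#
#     set_duplicate('bogus')        # not in Valid_Duplicates: InternalError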
215 -def LinkFunc(target, source, env):
  216      # Relative paths cause problems with symbolic links, so 
  217      # we use absolute paths, which may be a problem for people 
  218      # who want to move their soft-linked src-trees around. Those 
  219      # people should use the 'hard-copy' mode; soft links cannot be 
  220      # used for that, at least I have no idea how ... 
  221      src = source[0].abspath 
  222      dest = target[0].abspath 
  223      dir, file = os.path.split(dest) 
  224      if dir and not target[0].fs.isdir(dir): 
  225          os.makedirs(dir) 
  226      if not Link_Funcs: 
  227          # Set a default order of link functions. 
  228          set_duplicate('hard-soft-copy') 
  229      fs = source[0].fs 
  230      # Now link the files with the previously specified order. 
  231      for func in Link_Funcs: 
  232          try: 
  233              func(fs, src, dest) 
  234              break 
  235          except (IOError, OSError): 
  236              # An OSError indicates something happened like a permissions 
  237              # problem or an attempt to symlink across file-system 
  238              # boundaries. An IOError indicates something like the file 
  239              # not existing. In either case, keep trying additional 
  240              # functions in the list and only raise an error if the last 
  241              # one fails. 
  242              if func == Link_Funcs[-1]: 
  243                  # exceptions from the last link method (copy) are fatal 
  244                  raise 
  245      return 0
246 247 Link = SCons.Action.Action(LinkFunc, None)
248 -def LocalString(target, source, env):
249 return 'Local copy of %s from %s' % (target[0], source[0])
250 251 LocalCopy = SCons.Action.Action(LinkFunc, LocalString) 252
253 -def UnlinkFunc(target, source, env):
254 t = target[0] 255 t.fs.unlink(t.abspath) 256 return 0
257 258 Unlink = SCons.Action.Action(UnlinkFunc, None) 259
260 -def MkdirFunc(target, source, env):
261 t = target[0] 262 if not t.exists(): 263 t.fs.mkdir(t.abspath) 264 return 0
265 266 Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None) 267 268 MkdirBuilder = None 269
270 -def get_MkdirBuilder():
271 global MkdirBuilder 272 if MkdirBuilder is None: 273 import SCons.Builder 274 import SCons.Defaults 275 # "env" will get filled in by Executor.get_build_env() 276 # calling SCons.Defaults.DefaultEnvironment() when necessary. 277 MkdirBuilder = SCons.Builder.Builder(action = Mkdir, 278 env = None, 279 explain = None, 280 is_explicit = None, 281 target_scanner = SCons.Defaults.DirEntryScanner, 282 name = "MkdirBuilder") 283 return MkdirBuilder
284
285 -class _Null:
286 pass
287 288 _null = _Null() 289 290 DefaultSCCSBuilder = None 291 DefaultRCSBuilder = None 292
293 -def get_DefaultSCCSBuilder():
294 global DefaultSCCSBuilder 295 if DefaultSCCSBuilder is None: 296 import SCons.Builder 297 # "env" will get filled in by Executor.get_build_env() 298 # calling SCons.Defaults.DefaultEnvironment() when necessary. 299 act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR') 300 DefaultSCCSBuilder = SCons.Builder.Builder(action = act, 301 env = None, 302 name = "DefaultSCCSBuilder") 303 return DefaultSCCSBuilder
304
305 -def get_DefaultRCSBuilder():
306 global DefaultRCSBuilder 307 if DefaultRCSBuilder is None: 308 import SCons.Builder 309 # "env" will get filled in by Executor.get_build_env() 310 # calling SCons.Defaults.DefaultEnvironment() when necessary. 311 act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR') 312 DefaultRCSBuilder = SCons.Builder.Builder(action = act, 313 env = None, 314 name = "DefaultRCSBuilder") 315 return DefaultRCSBuilder
316 317 # Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem. 318 _is_cygwin = sys.platform == "cygwin" 319 if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
320 - def _my_normcase(x):
321 return x
322 else:
323 - def _my_normcase(x):
324 return string.upper(x)
325 326 327
328 -class DiskChecker:
329 - def __init__(self, type, do, ignore):
330 self.type = type 331 self.do = do 332 self.ignore = ignore 333 self.set_do()
334 - def set_do(self):
335 self.__call__ = self.do
336 - def set_ignore(self):
337 self.__call__ = self.ignore
338 - def set(self, list):
339 if self.type in list: 340 self.set_do() 341 else: 342 self.set_ignore()
343
344 -def do_diskcheck_match(node, predicate, errorfmt):
345 result = predicate() 346 try: 347 # If calling the predicate() cached a None value from stat(), 348 # remove it so it doesn't interfere with later attempts to 349 # build this Node as we walk the DAG. (This isn't a great way 350 # to do this, we're reaching into an interface that doesn't 351 # really belong to us, but it's all about performance, so 352 # for now we'll just document the dependency...) 353 if node._memo['stat'] is None: 354 del node._memo['stat'] 355 except (AttributeError, KeyError): 356 pass 357 if result: 358 raise TypeError, errorfmt % node.abspath
359
360 -def ignore_diskcheck_match(node, predicate, errorfmt):
361 pass
362
363 -def do_diskcheck_rcs(node, name):
364 try: 365 rcs_dir = node.rcs_dir 366 except AttributeError: 367 if node.entry_exists_on_disk('RCS'): 368 rcs_dir = node.Dir('RCS') 369 else: 370 rcs_dir = None 371 node.rcs_dir = rcs_dir 372 if rcs_dir: 373 return rcs_dir.entry_exists_on_disk(name+',v') 374 return None
375
376 -def ignore_diskcheck_rcs(node, name):
377 return None
378
379 -def do_diskcheck_sccs(node, name):
380 try: 381 sccs_dir = node.sccs_dir 382 except AttributeError: 383 if node.entry_exists_on_disk('SCCS'): 384 sccs_dir = node.Dir('SCCS') 385 else: 386 sccs_dir = None 387 node.sccs_dir = sccs_dir 388 if sccs_dir: 389 return sccs_dir.entry_exists_on_disk('s.'+name) 390 return None
391
392 -def ignore_diskcheck_sccs(node, name):
393 return None
394 395 diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match) 396 diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs) 397 diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs) 398 399 diskcheckers = [ 400 diskcheck_match, 401 diskcheck_rcs, 402 diskcheck_sccs, 403 ] 404
405 -def set_diskcheck(list):
406 for dc in diskcheckers: 407 dc.set(list)
408
409 -def diskcheck_types():
410 return map(lambda dc: dc.type, diskcheckers)
411 412 413
414 -class EntryProxy(SCons.Util.Proxy):
415 - def __get_abspath(self):
416 entry = self.get() 417 return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(), 418 entry.name + "_abspath")
419
420 - def __get_filebase(self):
421 name = self.get().name 422 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0], 423 name + "_filebase")
424
425 - def __get_suffix(self):
426 name = self.get().name 427 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1], 428 name + "_suffix")
429
430 - def __get_file(self):
431 name = self.get().name 432 return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
433
434 - def __get_base_path(self):
435 """Return the file's directory and file name, with the 436 suffix stripped.""" 437 entry = self.get() 438 return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0], 439 entry.name + "_base")
440
441 - def __get_posix_path(self):
442 """Return the path with / as the path separator, 443 regardless of platform.""" 444 if os.sep == '/': 445 return self 446 else: 447 entry = self.get() 448 r = string.replace(entry.get_path(), os.sep, '/') 449 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
450
451 - def __get_windows_path(self):
452 """Return the path with \ as the path separator, 453 regardless of platform.""" 454 if os.sep == '\\': 455 return self 456 else: 457 entry = self.get() 458 r = string.replace(entry.get_path(), os.sep, '\\') 459 return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")
460
461 - def __get_srcnode(self):
462 return EntryProxy(self.get().srcnode())
463
464 - def __get_srcdir(self):
465 """Returns the directory containing the source node linked to this 466 node via VariantDir(), or the directory of this node if not linked.""" 467 return EntryProxy(self.get().srcnode().dir)
468
469 - def __get_rsrcnode(self):
470 return EntryProxy(self.get().srcnode().rfile())
471
472 - def __get_rsrcdir(self):
473 """Returns the directory containing the source node linked to this 474 node via VariantDir(), or the directory of this node if not linked.""" 475 return EntryProxy(self.get().srcnode().rfile().dir)
476
477 - def __get_dir(self):
478 return EntryProxy(self.get().dir)
479 480 dictSpecialAttrs = { "base" : __get_base_path, 481 "posix" : __get_posix_path, 482 "windows" : __get_windows_path, 483 "win32" : __get_windows_path, 484 "srcpath" : __get_srcnode, 485 "srcdir" : __get_srcdir, 486 "dir" : __get_dir, 487 "abspath" : __get_abspath, 488 "filebase" : __get_filebase, 489 "suffix" : __get_suffix, 490 "file" : __get_file, 491 "rsrcpath" : __get_rsrcnode, 492 "rsrcdir" : __get_rsrcdir, 493 } 494
495 - def __getattr__(self, name):
496 # This is how we implement the "special" attributes 497 # such as base, posix, srcdir, etc. 498 try: 499 attr_function = self.dictSpecialAttrs[name] 500 except KeyError: 501 try: 502 attr = SCons.Util.Proxy.__getattr__(self, name) 503 except AttributeError, e: 504 # Raise our own AttributeError subclass with an 505 # overridden __str__() method that identifies the 506 # name of the entry that caused the exception. 507 raise EntryProxyAttributeError(self, name) 508 return attr 509 else: 510 return attr_function(self)
511
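# Illustrative sketch: the dictSpecialAttrs table above is what makes
# substitutions such as '${TARGET.posix}' or '${SOURCE.filebase}' work.
# Attribute access on the proxy first consults that table, then falls
# through to the wrapped Node:
#
#     proxy = some_file_node.get_subst_proxy()   # some_file_node: hypothetical
#     proxy.posix      # path with '/' separators, via __get_posix_path()
#     proxy.filebase   # file name without its suffix, via __get_filebase()
#     proxy.nonsense   # raises EntryProxyAttributeError naming the entry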
512 -class Base(SCons.Node.Node):
513 """A generic class for file system entries. This class is for 514 when we don't know yet whether the entry being looked up is a file 515 or a directory. Instances of this class can morph into either 516 Dir or File objects by a later, more precise lookup. 517 518 Note: this class does not define __cmp__ and __hash__ for 519 efficiency reasons. SCons does a lot of comparing of 520 Node.FS.{Base,Entry,File,Dir} objects, so those operations must be 521 as fast as possible, which means we want to use Python's built-in 522 object identity comparisons. 523 """ 524 525 memoizer_counters = [] 526
527 - def __init__(self, name, directory, fs):
528 """Initialize a generic Node.FS.Base object. 529 530 Call the superclass initialization, take care of setting up 531 our relative and absolute paths, identify our parent 532 directory, and indicate that this node should use 533 signatures.""" 534 if __debug__: logInstanceCreation(self, 'Node.FS.Base') 535 SCons.Node.Node.__init__(self) 536 537 self.name = name 538 self.suffix = SCons.Util.splitext(name)[1] 539 self.fs = fs 540 541 assert directory, "A directory must be provided" 542 543 self.abspath = directory.entry_abspath(name) 544 self.labspath = directory.entry_labspath(name) 545 if directory.path == '.': 546 self.path = name 547 else: 548 self.path = directory.entry_path(name) 549 if directory.tpath == '.': 550 self.tpath = name 551 else: 552 self.tpath = directory.entry_tpath(name) 553 self.path_elements = directory.path_elements + [self] 554 555 self.dir = directory 556 self.cwd = None # will hold the SConscript directory for target nodes 557 self.duplicate = directory.duplicate
558
559 - def str_for_display(self):
560 return '"' + self.__str__() + '"'
561
562 - def must_be_same(self, klass):
563 """ 564 This node, which already existed, is being looked up as the 565 specified klass. Raise an exception if it isn't. 566 """ 567 if self.__class__ is klass or klass is Entry: 568 return 569 raise TypeError, "Tried to lookup %s '%s' as a %s." %\ 570 (self.__class__.__name__, self.path, klass.__name__)
571
572 - def get_dir(self):
573 return self.dir
574
575 - def get_suffix(self):
576 return self.suffix
577
578 - def rfile(self):
579 return self
580
581 - def __str__(self):
582 """A Node.FS.Base object's string representation is its path 583 name.""" 584 global Save_Strings 585 if Save_Strings: 586 return self._save_str() 587 return self._get_str()
588 589 memoizer_counters.append(SCons.Memoize.CountValue('_save_str')) 590
591 - def _save_str(self):
592 try: 593 return self._memo['_save_str'] 594 except KeyError: 595 pass 596 result = self._get_str() 597 self._memo['_save_str'] = result 598 return result
599
600 - def _get_str(self):
  601          global Save_Strings 
  602          if self.duplicate or self.is_derived(): 
  603              return self.get_path() 
  604          srcnode = self.srcnode() 
  605          if srcnode.stat() is None and self.stat() is not None: 
  606              result = self.get_path() 
  607          else: 
  608              result = srcnode.get_path() 
  609          if not Save_Strings: 
  610              # We're not at the point where we're saving the string 
  611              # representations of FS Nodes (because we haven't finished 
  612              # reading the SConscript files and need to have str() return 
  613              # things relative to them). That also means we can't yet 
  614              # cache values returned (or not returned) by stat(), since 
  615              # Python code in the SConscript files might still create 
  616              # or otherwise affect the on-disk file. So get rid of the 
  617              # values that the underlying stat() method saved. 
  618              try: del self._memo['stat'] 
  619              except KeyError: pass 
  620              if self is not srcnode: 
  621                  try: del srcnode._memo['stat'] 
  622                  except KeyError: pass 
  623          return result
624 625 rstr = __str__ 626 627 memoizer_counters.append(SCons.Memoize.CountValue('stat')) 628
629 - def stat(self):
630 try: return self._memo['stat'] 631 except KeyError: pass 632 try: result = self.fs.stat(self.abspath) 633 except os.error: result = None 634 self._memo['stat'] = result 635 return result
636
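    # Illustrative sketch of the memoization idiom used throughout this
    # module: the first stat() call hits the file system and the result
    # (even a None for "does not exist") is parked in self._memo['stat'],
    # so later calls reuse it until clear() or the Save_Strings logic in
    # _get_str() discards the cached entry.
    #
    #     node.stat()      # os.stat() via self.fs, result cached
    #     node.exists()    # answered from the cached stat result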
637 - def exists(self):
638 return self.stat() is not None
639
640 - def rexists(self):
641 return self.rfile().exists()
642
643 - def getmtime(self):
644 st = self.stat() 645 if st: return st[stat.ST_MTIME] 646 else: return None
647
648 - def getsize(self):
649 st = self.stat() 650 if st: return st[stat.ST_SIZE] 651 else: return None
652
653 - def isdir(self):
654 st = self.stat() 655 return st is not None and stat.S_ISDIR(st[stat.ST_MODE])
656
657 - def isfile(self):
658 st = self.stat() 659 return st is not None and stat.S_ISREG(st[stat.ST_MODE])
660 661 if hasattr(os, 'symlink'): 666 else: 669
670 - def is_under(self, dir):
671 if self is dir: 672 return 1 673 else: 674 return self.dir.is_under(dir)
675
676 - def set_local(self):
677 self._local = 1
678
679 - def srcnode(self):
680 """If this node is in a build path, return the node 681 corresponding to its source file. Otherwise, return 682 ourself. 683 """ 684 srcdir_list = self.dir.srcdir_list() 685 if srcdir_list: 686 srcnode = srcdir_list[0].Entry(self.name) 687 srcnode.must_be_same(self.__class__) 688 return srcnode 689 return self
690
691 - def get_path(self, dir=None):
692 """Return path relative to the current working directory of the 693 Node.FS.Base object that owns us.""" 694 if not dir: 695 dir = self.fs.getcwd() 696 if self == dir: 697 return '.' 698 path_elems = self.path_elements 699 try: i = path_elems.index(dir) 700 except ValueError: pass 701 else: path_elems = path_elems[i+1:] 702 path_elems = map(lambda n: n.name, path_elems) 703 return string.join(path_elems, os.sep)
704
705 - def set_src_builder(self, builder):
706 """Set the source code builder for this node.""" 707 self.sbuilder = builder 708 if not self.has_builder(): 709 self.builder_set(builder)
710
711 - def src_builder(self):
712 """Fetch the source code builder for this node. 713 714 If there isn't one, we cache the source code builder specified 715 for the directory (which in turn will cache the value from its 716 parent directory, and so on up to the file system root). 717 """ 718 try: 719 scb = self.sbuilder 720 except AttributeError: 721 scb = self.dir.src_builder() 722 self.sbuilder = scb 723 return scb
724
725 - def get_abspath(self):
726 """Get the absolute path of the file.""" 727 return self.abspath
728
729 - def for_signature(self):
730 # Return just our name. Even an absolute path would not work, 731 # because that can change thanks to symlinks or remapped network 732 # paths. 733 return self.name
734
735 - def get_subst_proxy(self):
736 try: 737 return self._proxy 738 except AttributeError: 739 ret = EntryProxy(self) 740 self._proxy = ret 741 return ret
742
743 - def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
744 """ 745 746 Generates a target entry that corresponds to this entry (usually 747 a source file) with the specified prefix and suffix. 748 749 Note that this method can be overridden dynamically for generated 750 files that need different behavior. See Tool/swig.py for 751 an example. 752 """ 753 return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
754
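    # Illustrative sketch: for a hypothetical source node 'foo.c',
    # target_from_source('lib', '.obj') returns the sibling Entry
    # 'libfoo.obj', i.e. prefix + splitext('foo.c')[0] + suffix looked
    # up in the same directory.
    #
    #     node = src_dir_node.File('foo.c')        # src_dir_node: hypothetical
    #     node.target_from_source('lib', '.obj')   # -> Entry for 'libfoo.obj'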
755 - def _Rfindalldirs_key(self, pathlist):
756 return pathlist
757 758 memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key)) 759
760 - def Rfindalldirs(self, pathlist):
761 """ 762 Return all of the directories for a given path list, including 763 corresponding "backing" directories in any repositories. 764 765 The Node lookups are relative to this Node (typically a 766 directory), so memoizing result saves cycles from looking 767 up the same path for each target in a given directory. 768 """ 769 try: 770 memo_dict = self._memo['Rfindalldirs'] 771 except KeyError: 772 memo_dict = {} 773 self._memo['Rfindalldirs'] = memo_dict 774 else: 775 try: 776 return memo_dict[pathlist] 777 except KeyError: 778 pass 779 780 create_dir_relative_to_self = self.Dir 781 result = [] 782 for path in pathlist: 783 if isinstance(path, SCons.Node.Node): 784 result.append(path) 785 else: 786 dir = create_dir_relative_to_self(path) 787 result.extend(dir.get_all_rdirs()) 788 789 memo_dict[pathlist] = result 790 791 return result
792
793 - def RDirs(self, pathlist):
794 """Search for a list of directories in the Repository list.""" 795 cwd = self.cwd or self.fs._cwd 796 return cwd.Rfindalldirs(pathlist)
797 798 memoizer_counters.append(SCons.Memoize.CountValue('rentry')) 799
800 - def rentry(self):
801 try: 802 return self._memo['rentry'] 803 except KeyError: 804 pass 805 result = self 806 if not self.exists(): 807 norm_name = _my_normcase(self.name) 808 for dir in self.dir.get_all_rdirs(): 809 try: 810 node = dir.entries[norm_name] 811 except KeyError: 812 if dir.entry_exists_on_disk(self.name): 813 result = dir.Entry(self.name) 814 break 815 self._memo['rentry'] = result 816 return result
817
818 - def _glob1(self, pattern, ondisk=True, source=False, strings=False):
819 return []
820
821 -class Entry(Base):
822 """This is the class for generic Node.FS entries--that is, things 823 that could be a File or a Dir, but we're just not sure yet. 824 Consequently, the methods in this class really exist just to 825 transform their associated object into the right class when the 826 time comes, and then call the same-named method in the transformed 827 class.""" 828
829 - def diskcheck_match(self):
830 pass
831
832 - def disambiguate(self, must_exist=None):
833 """ 834 """ 835 if self.isdir(): 836 self.__class__ = Dir 837 self._morph() 838 elif self.isfile(): 839 self.__class__ = File 840 self._morph() 841 self.clear() 842 else: 843 # There was nothing on-disk at this location, so look in 844 # the src directory. 845 # 846 # We can't just use self.srcnode() straight away because 847 # that would create an actual Node for this file in the src 848 # directory, and there might not be one. Instead, use the 849 # dir_on_disk() method to see if there's something on-disk 850 # with that name, in which case we can go ahead and call 851 # self.srcnode() to create the right type of entry. 852 srcdir = self.dir.srcnode() 853 if srcdir != self.dir and \ 854 srcdir.entry_exists_on_disk(self.name) and \ 855 self.srcnode().isdir(): 856 self.__class__ = Dir 857 self._morph() 858 elif must_exist: 859 msg = "No such file or directory: '%s'" % self.abspath 860 raise SCons.Errors.UserError, msg 861 else: 862 self.__class__ = File 863 self._morph() 864 self.clear() 865 return self
866
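    # Illustrative sketch: an Entry only commits to being a Dir or a File
    # once something has to know. Assuming 'build/out' does not yet exist
    # on disk and has no source-directory counterpart:
    #
    #     node = fs.Entry('build/out')      # fs: a hypothetical FS instance
    #     node.__class__                    # -> Entry
    #     node.disambiguate().__class__     # -> File (the fallback case)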
867 - def rfile(self):
868 """We're a generic Entry, but the caller is actually looking for 869 a File at this point, so morph into one.""" 870 self.__class__ = File 871 self._morph() 872 self.clear() 873 return File.rfile(self)
874
875 - def scanner_key(self):
876 return self.get_suffix()
877
878 - def get_contents(self):
879 """Fetch the contents of the entry. 880 881 Since this should return the real contents from the file 882 system, we check to see into what sort of subclass we should 883 morph this Entry.""" 884 try: 885 self = self.disambiguate(must_exist=1) 886 except SCons.Errors.UserError: 887 # There was nothing on disk with which to disambiguate 888 # this entry. Leave it as an Entry, but return a null 889 # string so calls to get_contents() in emitters and the 890 # like (e.g. in qt.py) don't have to disambiguate by hand 891 # or catch the exception. 892 return '' 893 else: 894 return self.get_contents()
895
896 - def must_be_same(self, klass):
897 """Called to make sure a Node is a Dir. Since we're an 898 Entry, we can morph into one.""" 899 if self.__class__ is not klass: 900 self.__class__ = klass 901 self._morph() 902 self.clear()
903 904 # The following methods can get called before the Taskmaster has 905 # had a chance to call disambiguate() directly to see if this Entry 906 # should really be a Dir or a File. We therefore use these to call 907 # disambiguate() transparently (from our caller's point of view). 908 # 909 # Right now, this minimal set of methods has been derived by just 910 # looking at some of the methods that will obviously be called early 911 # in any of the various Taskmasters' calling sequences, and then 912 # empirically figuring out which additional methods are necessary 913 # to make various tests pass. 914
915 - def exists(self):
916 """Return if the Entry exists. Check the file system to see 917 what we should turn into first. Assume a file if there's no 918 directory.""" 919 return self.disambiguate().exists()
920
921 - def rel_path(self, other):
922 d = self.disambiguate() 923 if d.__class__ is Entry: 924 raise "rel_path() could not disambiguate File/Dir" 925 return d.rel_path(other)
926
927 - def new_ninfo(self):
928 return self.disambiguate().new_ninfo()
929
930 - def changed_since_last_build(self, target, prev_ni):
931 return self.disambiguate().changed_since_last_build(target, prev_ni)
932
933 - def _glob1(self, pattern, ondisk=True, source=False, strings=False):
934 return self.disambiguate()._glob1(pattern, ondisk, source, strings)
935 936 # This is for later so we can differentiate between Entry the class and Entry 937 # the method of the FS class. 938 _classEntry = Entry 939 940
941 -class LocalFS:
942 943 if SCons.Memoize.use_memoizer: 944 __metaclass__ = SCons.Memoize.Memoized_Metaclass 945 946 # This class implements an abstraction layer for operations involving 947 # a local file system. Essentially, this wraps any function in 948 # the os, os.path or shutil modules that we use to actually go do 949 # anything with or to the local file system. 950 # 951 # Note that there's a very good chance we'll refactor this part of 952 # the architecture in some way as we really implement the interface(s) 953 # for remote file system Nodes. For example, the right architecture 954 # might be to have this be a subclass instead of a base class. 955 # Nevertheless, we're using this as a first step in that direction. 956 # 957 # We're not using chdir() yet because the calling subclass method 958 # needs to use os.chdir() directly to avoid recursion. Will we 959 # really need this one? 960 #def chdir(self, path): 961 # return os.chdir(path)
962 - def chmod(self, path, mode):
963 return os.chmod(path, mode)
964 - def copy(self, src, dst):
965 return shutil.copy(src, dst)
966 - def copy2(self, src, dst):
967 return shutil.copy2(src, dst)
968 - def exists(self, path):
969 return os.path.exists(path)
970 - def getmtime(self, path):
971 return os.path.getmtime(path)
972 - def getsize(self, path):
973 return os.path.getsize(path)
974 - def isdir(self, path):
975 return os.path.isdir(path)
976 - def isfile(self, path):
977 return os.path.isfile(path)
980 - def lstat(self, path):
981 return os.lstat(path)
982 - def listdir(self, path):
983 return os.listdir(path)
984 - def makedirs(self, path):
985 return os.makedirs(path)
986 - def mkdir(self, path):
987 return os.mkdir(path)
988 - def rename(self, old, new):
989 return os.rename(old, new)
990 - def stat(self, path):
991 return os.stat(path)
994 - def open(self, path):
995 return open(path)
998 999 if hasattr(os, 'symlink'): 1002 else: 1005 1006 if hasattr(os, 'readlink'): 1009 else:
1012 1013 1014 #class RemoteFS: 1015 # # Skeleton for the obvious methods we might need from the 1016 # # abstraction layer for a remote filesystem. 1017 # def upload(self, local_src, remote_dst): 1018 # pass 1019 # def download(self, remote_src, local_dst): 1020 # pass 1021 1022
1023 -class FS(LocalFS):
1024 1025 memoizer_counters = [] 1026
1027 - def __init__(self, path = None):
1028 """Initialize the Node.FS subsystem. 1029 1030 The supplied path is the top of the source tree, where we 1031 expect to find the top-level build file. If no path is 1032 supplied, the current directory is the default. 1033 1034 The path argument must be a valid absolute path. 1035 """ 1036 if __debug__: logInstanceCreation(self, 'Node.FS') 1037 1038 self._memo = {} 1039 1040 self.Root = {} 1041 self.SConstruct_dir = None 1042 self.max_drift = default_max_drift 1043 1044 self.Top = None 1045 if path is None: 1046 self.pathTop = os.getcwd() 1047 else: 1048 self.pathTop = path 1049 self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0]) 1050 1051 self.Top = self.Dir(self.pathTop) 1052 self.Top.path = '.' 1053 self.Top.tpath = '.' 1054 self._cwd = self.Top 1055 1056 DirNodeInfo.fs = self 1057 FileNodeInfo.fs = self
1058
1059 - def set_SConstruct_dir(self, dir):
1060 self.SConstruct_dir = dir
1061
1062 - def get_max_drift(self):
1063 return self.max_drift
1064
1065 - def set_max_drift(self, max_drift):
1066 self.max_drift = max_drift
1067
1068 - def getcwd(self):
1069 return self._cwd
1070
1071 - def chdir(self, dir, change_os_dir=0):
1072 """Change the current working directory for lookups. 1073 If change_os_dir is true, we will also change the "real" cwd 1074 to match. 1075 """ 1076 curr=self._cwd 1077 try: 1078 if dir is not None: 1079 self._cwd = dir 1080 if change_os_dir: 1081 os.chdir(dir.abspath) 1082 except OSError: 1083 self._cwd = curr 1084 raise
1085
1086 - def get_root(self, drive):
1087 """ 1088 Returns the root directory for the specified drive, creating 1089 it if necessary. 1090 """ 1091 drive = _my_normcase(drive) 1092 try: 1093 return self.Root[drive] 1094 except KeyError: 1095 root = RootDir(drive, self) 1096 self.Root[drive] = root 1097 if not drive: 1098 self.Root[self.defaultDrive] = root 1099 elif drive == self.defaultDrive: 1100 self.Root[''] = root 1101 return root
1102
1103 - def _lookup(self, p, directory, fsclass, create=1):
1104 """ 1105 The generic entry point for Node lookup with user-supplied data. 1106 1107 This translates arbitrary input into a canonical Node.FS object 1108 of the specified fsclass. The general approach for strings is 1109 to turn it into a fully normalized absolute path and then call 1110 the root directory's lookup_abs() method for the heavy lifting. 1111 1112 If the path name begins with '#', it is unconditionally 1113 interpreted relative to the top-level directory of this FS. '#' 1114 is treated as a synonym for the top-level SConstruct directory, 1115 much like '~' is treated as a synonym for the user's home 1116 directory in a UNIX shell. So both '#foo' and '#/foo' refer 1117 to the 'foo' subdirectory underneath the top-level SConstruct 1118 directory. 1119 1120 If the path name is relative, then the path is looked up relative 1121 to the specified directory, or the current directory (self._cwd, 1122 typically the SConscript directory) if the specified directory 1123 is None. 1124 """ 1125 if isinstance(p, Base): 1126 # It's already a Node.FS object. Make sure it's the right 1127 # class and return. 1128 p.must_be_same(fsclass) 1129 return p 1130 # str(p) in case it's something like a proxy object 1131 p = str(p) 1132 1133 initial_hash = (p[0:1] == '#') 1134 if initial_hash: 1135 # There was an initial '#', so we strip it and override 1136 # whatever directory they may have specified with the 1137 # top-level SConstruct directory. 1138 p = p[1:] 1139 directory = self.Top 1140 1141 if directory and not isinstance(directory, Dir): 1142 directory = self.Dir(directory) 1143 1144 if do_splitdrive: 1145 drive, p = os.path.splitdrive(p) 1146 else: 1147 drive = '' 1148 if drive and not p: 1149 # This causes a naked drive letter to be treated as a synonym 1150 # for the root directory on that drive. 1151 p = os.sep 1152 absolute = os.path.isabs(p) 1153 1154 needs_normpath = needs_normpath_check.match(p) 1155 1156 if initial_hash or not absolute: 1157 # This is a relative lookup, either to the top-level 1158 # SConstruct directory (because of the initial '#') or to 1159 # the current directory (the path name is not absolute). 1160 # Add the string to the appropriate directory lookup path, 1161 # after which the whole thing gets normalized. 1162 if not directory: 1163 directory = self._cwd 1164 if p: 1165 p = directory.labspath + '/' + p 1166 else: 1167 p = directory.labspath 1168 1169 if needs_normpath: 1170 p = os.path.normpath(p) 1171 1172 if drive or absolute: 1173 root = self.get_root(drive) 1174 else: 1175 if not directory: 1176 directory = self._cwd 1177 root = directory.root 1178 1179 if os.sep != '/': 1180 p = string.replace(p, os.sep, '/') 1181 return root._lookup_abs(p, fsclass, create)
1182
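    # Illustrative sketch of the lookup rules described above, assuming an
    # FS whose top-level SConstruct directory is /home/me/proj:
    #
    #     fs._lookup('#src/foo.c', None, File)     # -> /home/me/proj/src/foo.c
    #     fs._lookup('src/foo.c', some_dir, File)  # relative to some_dir
    #                                              # (some_dir: hypothetical Dir)
    #     fs._lookup('/tmp/x', None, Entry)        # absolute: resolved from the
    #                                              # root directory node
    #
    # In practice the Entry(), File() and Dir() wrappers below are the
    # public entry points; they all delegate to _lookup().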
1183 - def Entry(self, name, directory = None, create = 1):
1184 """Look up or create a generic Entry node with the specified name. 1185 If the name is a relative path (begins with ./, ../, or a file 1186 name), then it is looked up relative to the supplied directory 1187 node, or to the top level directory of the FS (supplied at 1188 construction time) if no directory is supplied. 1189 """ 1190 return self._lookup(name, directory, Entry, create)
1191
1192 - def File(self, name, directory = None, create = 1):
1193 """Look up or create a File node with the specified name. If 1194 the name is a relative path (begins with ./, ../, or a file name), 1195 then it is looked up relative to the supplied directory node, 1196 or to the top level directory of the FS (supplied at construction 1197 time) if no directory is supplied. 1198 1199 This method will raise TypeError if a directory is found at the 1200 specified path. 1201 """ 1202 return self._lookup(name, directory, File, create)
1203
1204 - def Dir(self, name, directory = None, create = True):
1205 """Look up or create a Dir node with the specified name. If 1206 the name is a relative path (begins with ./, ../, or a file name), 1207 then it is looked up relative to the supplied directory node, 1208 or to the top level directory of the FS (supplied at construction 1209 time) if no directory is supplied. 1210 1211 This method will raise TypeError if a normal file is found at the 1212 specified path. 1213 """ 1214 return self._lookup(name, directory, Dir, create)
1215
1216 - def VariantDir(self, variant_dir, src_dir, duplicate=1):
1217 """Link the supplied variant directory to the source directory 1218 for purposes of building files.""" 1219 1220 if not isinstance(src_dir, SCons.Node.Node): 1221 src_dir = self.Dir(src_dir) 1222 if not isinstance(variant_dir, SCons.Node.Node): 1223 variant_dir = self.Dir(variant_dir) 1224 if src_dir.is_under(variant_dir): 1225 raise SCons.Errors.UserError, "Source directory cannot be under variant directory." 1226 if variant_dir.srcdir: 1227 if variant_dir.srcdir == src_dir: 1228 return # We already did this. 1229 raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir) 1230 variant_dir.link(src_dir, duplicate)
1231
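    # Illustrative sketch, assuming 'src' and 'build' directories under the
    # top-level SConstruct directory:
    #
    #     fs.VariantDir('build', 'src', duplicate=0)
    #     # 'build' now reports 'src' as its srcdir, so lookups of
    #     # 'build/foo.c' can fall back to 'src/foo.c' without copying.
    #     fs.VariantDir('build', 'other')   # UserError: already has a srcdir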
1232 - def Repository(self, *dirs):
1233 """Specify Repository directories to search.""" 1234 for d in dirs: 1235 if not isinstance(d, SCons.Node.Node): 1236 d = self.Dir(d) 1237 self.Top.addRepository(d)
1238
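    # Illustrative sketch: repositories are extra directories hung off the
    # top-level node and searched by get_all_rdirs()/rdir()/rfile().
    #
    #     fs.Repository('/usr/share/proj1', '/usr/share/proj2')
    #     # self.Top now lists both as backing directories; a missing local
    #     # file can then resolve to its repository copy via rfile().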
1239 - def variant_dir_target_climb(self, orig, dir, tail):
1240 """Create targets in corresponding variant directories 1241 1242 Climb the directory tree, and look up path names 1243 relative to any linked variant directories we find. 1244 1245 Even though this loops and walks up the tree, we don't memoize 1246 the return value because this is really only used to process 1247 the command-line targets. 1248 """ 1249 targets = [] 1250 message = None 1251 fmt = "building associated VariantDir targets: %s" 1252 start_dir = dir 1253 while dir: 1254 for bd in dir.variant_dirs: 1255 if start_dir.is_under(bd): 1256 # If already in the build-dir location, don't reflect 1257 return [orig], fmt % str(orig) 1258 p = apply(os.path.join, [bd.path] + tail) 1259 targets.append(self.Entry(p)) 1260 tail = [dir.name] + tail 1261 dir = dir.up() 1262 if targets: 1263 message = fmt % string.join(map(str, targets)) 1264 return targets, message
1265
1266 - def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
1267 """ 1268 Globs 1269 1270 This is mainly a shim layer 1271 """ 1272 if cwd is None: 1273 cwd = self.getcwd() 1274 return cwd.glob(pathname, ondisk, source, strings)
1275
1276 -class DirNodeInfo(SCons.Node.NodeInfoBase):
1277 # This should get reset by the FS initialization. 1278 current_version_id = 1 1279 1280 fs = None 1281
1282 - def str_to_node(self, s):
1283 top = self.fs.Top 1284 root = top.root 1285 if do_splitdrive: 1286 drive, s = os.path.splitdrive(s) 1287 if drive: 1288 root = self.fs.get_root(drive) 1289 if not os.path.isabs(s): 1290 s = top.labspath + '/' + s 1291 return root._lookup_abs(s, Entry)
1292
1293 -class DirBuildInfo(SCons.Node.BuildInfoBase):
1294 current_version_id = 1
1295 1296 glob_magic_check = re.compile('[*?[]') 1297
1298 -def has_glob_magic(s):
1299 return glob_magic_check.search(s) is not None
1300
1301 -class Dir(Base):
1302 """A class for directories in a file system. 1303 """ 1304 1305 memoizer_counters = [] 1306 1307 NodeInfo = DirNodeInfo 1308 BuildInfo = DirBuildInfo 1309
1310 - def __init__(self, name, directory, fs):
1311 if __debug__: logInstanceCreation(self, 'Node.FS.Dir') 1312 Base.__init__(self, name, directory, fs) 1313 self._morph()
1314
1315 - def _morph(self):
1316 """Turn a file system Node (either a freshly initialized directory 1317 object or a separate Entry object) into a proper directory object. 1318 1319 Set up this directory's entries and hook it into the file 1320 system tree. Specify that directories (this Node) don't use 1321 signatures for calculating whether they're current. 1322 """ 1323 1324 self.repositories = [] 1325 self.srcdir = None 1326 1327 self.entries = {} 1328 self.entries['.'] = self 1329 self.entries['..'] = self.dir 1330 self.cwd = self 1331 self.searched = 0 1332 self._sconsign = None 1333 self.variant_dirs = [] 1334 self.root = self.dir.root 1335 1336 # Don't just reset the executor, replace its action list, 1337 # because it might have some pre-or post-actions that need to 1338 # be preserved. 1339 self.builder = get_MkdirBuilder() 1340 self.get_executor().set_action_list(self.builder.action)
1341
1342 - def diskcheck_match(self):
1343 diskcheck_match(self, self.isfile, 1344 "File %s found where directory expected.")
1345
1346 - def __clearRepositoryCache(self, duplicate=None):
1347 """Called when we change the repository(ies) for a directory. 1348 This clears any cached information that is invalidated by changing 1349 the repository.""" 1350 1351 for node in self.entries.values(): 1352 if node != self.dir: 1353 if node != self and isinstance(node, Dir): 1354 node.__clearRepositoryCache(duplicate) 1355 else: 1356 node.clear() 1357 try: 1358 del node._srcreps 1359 except AttributeError: 1360 pass 1361 if duplicate is not None: 1362 node.duplicate=duplicate
1363
1364 - def __resetDuplicate(self, node):
1365 if node != self: 1366 node.duplicate = node.get_dir().duplicate
1367
1368 - def Entry(self, name):
1369 """ 1370 Looks up or creates an entry node named 'name' relative to 1371 this directory. 1372 """ 1373 return self.fs.Entry(name, self)
1374
1375 - def Dir(self, name, create=True):
1376 """ 1377 Looks up or creates a directory node named 'name' relative to 1378 this directory. 1379 """ 1380 return self.fs.Dir(name, self, create)
1381
1382 - def File(self, name):
1383 """ 1384 Looks up or creates a file node named 'name' relative to 1385 this directory. 1386 """ 1387 return self.fs.File(name, self)
1388
1389 - def _lookup_rel(self, name, klass, create=1):
1390 """ 1391 Looks up a *normalized* relative path name, relative to this 1392 directory. 1393 1394 This method is intended for use by internal lookups with 1395 already-normalized path data. For general-purpose lookups, 1396 use the Entry(), Dir() and File() methods above. 1397 1398 This method does *no* input checking and will die or give 1399 incorrect results if it's passed a non-normalized path name (e.g., 1400 a path containing '..'), an absolute path name, a top-relative 1401 ('#foo') path name, or any kind of object. 1402 """ 1403 name = self.entry_labspath(name) 1404 return self.root._lookup_abs(name, klass, create)
1405 1413
1414 - def getRepositories(self):
1415 """Returns a list of repositories for this directory. 1416 """ 1417 if self.srcdir and not self.duplicate: 1418 return self.srcdir.get_all_rdirs() + self.repositories 1419 return self.repositories
1420 1421 memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs')) 1422
1423 - def get_all_rdirs(self):
1424 try: 1425 return list(self._memo['get_all_rdirs']) 1426 except KeyError: 1427 pass 1428 1429 result = [self] 1430 fname = '.' 1431 dir = self 1432 while dir: 1433 for rep in dir.getRepositories(): 1434 result.append(rep.Dir(fname)) 1435 if fname == '.': 1436 fname = dir.name 1437 else: 1438 fname = dir.name + os.sep + fname 1439 dir = dir.up() 1440 1441 self._memo['get_all_rdirs'] = list(result) 1442 1443 return result
1444
1445 - def addRepository(self, dir):
1446 if dir != self and not dir in self.repositories: 1447 self.repositories.append(dir) 1448 dir.tpath = '.' 1449 self.__clearRepositoryCache()
1450
1451 - def up(self):
1452 return self.entries['..']
1453
1454 - def _rel_path_key(self, other):
1455 return str(other)
1456 1457 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key)) 1458
1459 - def rel_path(self, other):
1460 """Return a path to "other" relative to this directory. 1461 """ 1462 1463 # This complicated and expensive method, which constructs relative 1464 # paths between arbitrary Node.FS objects, is no longer used 1465 # by SCons itself. It was introduced to store dependency paths 1466 # in .sconsign files relative to the target, but that ended up 1467 # being significantly inefficient. 1468 # 1469 # We're continuing to support the method because some SConstruct 1470 # files out there started using it when it was available, and 1471 # we're all about backwards compatibility.. 1472 1473 try: 1474 memo_dict = self._memo['rel_path'] 1475 except KeyError: 1476 memo_dict = {} 1477 self._memo['rel_path'] = memo_dict 1478 else: 1479 try: 1480 return memo_dict[other] 1481 except KeyError: 1482 pass 1483 1484 if self is other: 1485 result = '.' 1486 1487 elif not other in self.path_elements: 1488 try: 1489 other_dir = other.get_dir() 1490 except AttributeError: 1491 result = str(other) 1492 else: 1493 if other_dir is None: 1494 result = other.name 1495 else: 1496 dir_rel_path = self.rel_path(other_dir) 1497 if dir_rel_path == '.': 1498 result = other.name 1499 else: 1500 result = dir_rel_path + os.sep + other.name 1501 else: 1502 i = self.path_elements.index(other) + 1 1503 1504 path_elems = ['..'] * (len(self.path_elements) - i) \ 1505 + map(lambda n: n.name, other.path_elements[i:]) 1506 1507 result = string.join(path_elems, os.sep) 1508 1509 memo_dict[other] = result 1510 1511 return result
1512
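    # Illustrative worked example, assuming a POSIX os.sep and nodes for
    # 'src/lib' and 'build/sub/main.o' under the same root:
    #
    #     build_sub = fs.Dir('build/sub')   # fs: hypothetical FS instance
    #     build_sub.rel_path(fs.File('build/sub/main.o'))   # -> 'main.o'
    #     build_sub.rel_path(fs.Dir('src/lib'))             # -> '../../src/lib'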
1513 - def get_env_scanner(self, env, kw={}):
1514 import SCons.Defaults 1515 return SCons.Defaults.DirEntryScanner
1516
1517 - def get_target_scanner(self):
1518 import SCons.Defaults 1519 return SCons.Defaults.DirEntryScanner
1520
1521 - def get_found_includes(self, env, scanner, path):
1522 """Return this directory's implicit dependencies. 1523 1524 We don't bother caching the results because the scan typically 1525 shouldn't be requested more than once (as opposed to scanning 1526 .h file contents, which can be requested as many times as the 1527 files is #included by other files). 1528 """ 1529 if not scanner: 1530 return [] 1531 # Clear cached info for this Dir. If we already visited this 1532 # directory on our walk down the tree (because we didn't know at 1533 # that point it was being used as the source for another Node) 1534 # then we may have calculated build signature before realizing 1535 # we had to scan the disk. Now that we have to, though, we need 1536 # to invalidate the old calculated signature so that any node 1537 # dependent on our directory structure gets one that includes 1538 # info about everything on disk. 1539 self.clear() 1540 return scanner(self, env, path)
1541 1542 # 1543 # Taskmaster interface subsystem 1544 # 1545
1546 - def prepare(self):
1547 pass
1548
1549 - def build(self, **kw):
1550 """A null "builder" for directories.""" 1551 global MkdirBuilder 1552 if self.builder is not MkdirBuilder: 1553 apply(SCons.Node.Node.build, [self,], kw)
1554 1555 # 1556 # 1557 # 1558
1559 - def _create(self):
1560 """Create this directory, silently and without worrying about 1561 whether the builder is the default or not.""" 1562 listDirs = [] 1563 parent = self 1564 while parent: 1565 if parent.exists(): 1566 break 1567 listDirs.append(parent) 1568 parent = parent.up() 1569 else: 1570 raise SCons.Errors.StopError, parent.path 1571 listDirs.reverse() 1572 for dirnode in listDirs: 1573 try: 1574 # Don't call dirnode.build(), call the base Node method 1575 # directly because we definitely *must* create this 1576 # directory. The dirnode.build() method will suppress 1577 # the build if it's the default builder. 1578 SCons.Node.Node.build(dirnode) 1579 dirnode.get_executor().nullify() 1580 # The build() action may or may not have actually 1581 # created the directory, depending on whether the -n 1582 # option was used or not. Delete the _exists and 1583 # _rexists attributes so they can be reevaluated. 1584 dirnode.clear() 1585 except OSError: 1586 pass
1587
1589 global MkdirBuilder 1590 return self.builder is not MkdirBuilder and self.has_builder()
1591
1592 - def alter_targets(self):
1593 """Return any corresponding targets in a variant directory. 1594 """ 1595 return self.fs.variant_dir_target_climb(self, self, [])
1596
1597 - def scanner_key(self):
1598 """A directory does not get scanned.""" 1599 return None
1600
1601 - def get_contents(self):
1602 """Return content signatures and names of all our children 1603 separated by new-lines. Ensure that the nodes are sorted.""" 1604 contents = [] 1605 name_cmp = lambda a, b: cmp(a.name, b.name) 1606 sorted_children = self.children()[:] 1607 sorted_children.sort(name_cmp) 1608 for node in sorted_children: 1609 contents.append('%s %s\n' % (node.get_csig(), node.name)) 1610 return string.join(contents, '')
1611
1612 - def get_csig(self):
1613 """Compute the content signature for Directory nodes. In 1614 general, this is not needed and the content signature is not 1615 stored in the DirNodeInfo. However, if get_contents on a Dir 1616 node is called which has a child directory, the child 1617 directory should return the hash of its contents.""" 1618 contents = self.get_contents() 1619 return SCons.Util.MD5signature(contents)
1620
1621 - def do_duplicate(self, src):
1622 pass
1623 1624 changed_since_last_build = SCons.Node.Node.state_has_changed 1625
1626 - def is_up_to_date(self):
1627 """If any child is not up-to-date, then this directory isn't, 1628 either.""" 1629 if self.builder is not MkdirBuilder and not self.exists(): 1630 return 0 1631 up_to_date = SCons.Node.up_to_date 1632 for kid in self.children(): 1633 if kid.get_state() > up_to_date: 1634 return 0 1635 return 1
1636
1637 - def rdir(self):
1638 if not self.exists(): 1639 norm_name = _my_normcase(self.name) 1640 for dir in self.dir.get_all_rdirs(): 1641 try: node = dir.entries[norm_name] 1642 except KeyError: node = dir.dir_on_disk(self.name) 1643 if node and node.exists() and \ 1644 (isinstance(dir, Dir) or isinstance(dir, Entry)): 1645 return node 1646 return self
1647
1648 - def sconsign(self):
1649 """Return the .sconsign file info for this directory, 1650 creating it first if necessary.""" 1651 if not self._sconsign: 1652 import SCons.SConsign 1653 self._sconsign = SCons.SConsign.ForDirectory(self) 1654 return self._sconsign
1655
1656 - def srcnode(self):
1657 """Dir has a special need for srcnode()...if we 1658 have a srcdir attribute set, then that *is* our srcnode.""" 1659 if self.srcdir: 1660 return self.srcdir 1661 return Base.srcnode(self)
1662
1663 - def get_timestamp(self):
1664 """Return the latest timestamp from among our children""" 1665 stamp = 0 1666 for kid in self.children(): 1667 if kid.get_timestamp() > stamp: 1668 stamp = kid.get_timestamp() 1669 return stamp
1670
1671 - def entry_abspath(self, name):
1672 return self.abspath + os.sep + name
1673
1674 - def entry_labspath(self, name):
1675 return self.labspath + '/' + name
1676
1677 - def entry_path(self, name):
1678 return self.path + os.sep + name
1679
1680 - def entry_tpath(self, name):
1681 return self.tpath + os.sep + name
1682
1683 - def entry_exists_on_disk(self, name):
1684 try: 1685 d = self.on_disk_entries 1686 except AttributeError: 1687 d = {} 1688 try: 1689 entries = os.listdir(self.abspath) 1690 except OSError: 1691 pass 1692 else: 1693 for entry in map(_my_normcase, entries): 1694 d[entry] = 1 1695 self.on_disk_entries = d 1696 return d.has_key(_my_normcase(name))
1697 1698 memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list')) 1699
1700 - def srcdir_list(self):
1701 try: 1702 return self._memo['srcdir_list'] 1703 except KeyError: 1704 pass 1705 1706 result = [] 1707 1708 dirname = '.' 1709 dir = self 1710 while dir: 1711 if dir.srcdir: 1712 result.append(dir.srcdir.Dir(dirname)) 1713 dirname = dir.name + os.sep + dirname 1714 dir = dir.up() 1715 1716 self._memo['srcdir_list'] = result 1717 1718 return result
1719
1720 - def srcdir_duplicate(self, name):
1721 for dir in self.srcdir_list(): 1722 if self.is_under(dir): 1723 # We shouldn't source from something in the build path; 1724 # variant_dir is probably under src_dir, in which case 1725 # we are reflecting. 1726 break 1727 if dir.entry_exists_on_disk(name): 1728 srcnode = dir.Entry(name).disambiguate() 1729 if self.duplicate: 1730 node = self.Entry(name).disambiguate() 1731 node.do_duplicate(srcnode) 1732 return node 1733 else: 1734 return srcnode 1735 return None
1736
1737 - def _srcdir_find_file_key(self, filename):
1738 return filename
1739 1740 memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key)) 1741
1742 - def srcdir_find_file(self, filename):
1743 try: 1744 memo_dict = self._memo['srcdir_find_file'] 1745 except KeyError: 1746 memo_dict = {} 1747 self._memo['srcdir_find_file'] = memo_dict 1748 else: 1749 try: 1750 return memo_dict[filename] 1751 except KeyError: 1752 pass 1753 1754 def func(node): 1755 if (isinstance(node, File) or isinstance(node, Entry)) and \ 1756 (node.is_derived() or node.exists()): 1757 return node 1758 return None
1759 1760 norm_name = _my_normcase(filename) 1761 1762 for rdir in self.get_all_rdirs(): 1763 try: node = rdir.entries[norm_name] 1764 except KeyError: node = rdir.file_on_disk(filename) 1765 else: node = func(node) 1766 if node: 1767 result = (node, self) 1768 memo_dict[filename] = result 1769 return result 1770 1771 for srcdir in self.srcdir_list(): 1772 for rdir in srcdir.get_all_rdirs(): 1773 try: node = rdir.entries[norm_name] 1774 except KeyError: node = rdir.file_on_disk(filename) 1775 else: node = func(node) 1776 if node: 1777 result = (File(filename, self, self.fs), srcdir) 1778 memo_dict[filename] = result 1779 return result 1780 1781 result = (None, None) 1782 memo_dict[filename] = result 1783 return result
1784
1785 - def dir_on_disk(self, name):
1786 if self.entry_exists_on_disk(name): 1787 try: return self.Dir(name) 1788 except TypeError: pass 1789 node = self.srcdir_duplicate(name) 1790 if isinstance(node, File): 1791 return None 1792 return node
1793
1794 - def file_on_disk(self, name):
1795 if self.entry_exists_on_disk(name) or \ 1796 diskcheck_rcs(self, name) or \ 1797 diskcheck_sccs(self, name): 1798 try: return self.File(name) 1799 except TypeError: pass 1800 node = self.srcdir_duplicate(name) 1801 if isinstance(node, Dir): 1802 return None 1803 return node
1804
1805 - def walk(self, func, arg):
1806 """ 1807 Walk this directory tree by calling the specified function 1808 for each directory in the tree. 1809 1810 This behaves like the os.path.walk() function, but for in-memory 1811 Node.FS.Dir objects. The function takes the same arguments as 1812 the functions passed to os.path.walk(): 1813 1814 func(arg, dirname, fnames) 1815 1816 Except that "dirname" will actually be the directory *Node*, 1817 not the string. The '.' and '..' entries are excluded from 1818 fnames. The fnames list may be modified in-place to filter the 1819 subdirectories visited or otherwise impose a specific order. 1820 The "arg" argument is always passed to func() and may be used 1821 in any way (or ignored, passing None is common). 1822 """ 1823 entries = self.entries 1824 names = entries.keys() 1825 names.remove('.') 1826 names.remove('..') 1827 func(arg, self, names) 1828 select_dirs = lambda n, e=entries: isinstance(e[n], Dir) 1829 for dirname in filter(select_dirs, names): 1830 entries[dirname].walk(func, arg)
1831
1832 - def glob(self, pathname, ondisk=True, source=False, strings=False):
1833 """ 1834 Returns a list of Nodes (or strings) matching a specified 1835 pathname pattern. 1836 1837 Pathname patterns follow UNIX shell semantics: * matches 1838 any-length strings of any characters, ? matches any character, 1839 and [] can enclose lists or ranges of characters. Matches do 1840 not span directory separators. 1841 1842 The matches take into account Repositories, returning local 1843 Nodes if a corresponding entry exists in a Repository (either 1844 an in-memory Node or something on disk). 1845 1846 By defafult, the glob() function matches entries that exist 1847 on-disk, in addition to in-memory Nodes. Setting the "ondisk" 1848 argument to False (or some other non-true value) causes the glob() 1849 function to only match in-memory Nodes. The default behavior is 1850 to return both the on-disk and in-memory Nodes. 1851 1852 The "source" argument, when true, specifies that corresponding 1853 source Nodes must be returned if you're globbing in a build 1854 directory (initialized with VariantDir()). The default behavior 1855 is to return Nodes local to the VariantDir(). 1856 1857 The "strings" argument, when true, returns the matches as strings, 1858 not Nodes. The strings are path names relative to this directory. 1859 1860 The underlying algorithm is adapted from the glob.glob() function 1861 in the Python library (but heavily modified), and uses fnmatch() 1862 under the covers. 1863 """ 1864 dirname, basename = os.path.split(pathname) 1865 if not dirname: 1866 return self._glob1(basename, ondisk, source, strings) 1867 if has_glob_magic(dirname): 1868 list = self.glob(dirname, ondisk, source, strings=False) 1869 else: 1870 list = [self.Dir(dirname, create=True)] 1871 result = [] 1872 for dir in list: 1873 r = dir._glob1(basename, ondisk, source, strings) 1874 if strings: 1875 r = map(lambda x, d=str(dir): os.path.join(d, x), r) 1876 result.extend(r) 1877 result.sort(lambda a, b: cmp(str(a), str(b))) 1878 return result
1879
1880 - def _glob1(self, pattern, ondisk=True, source=False, strings=False):
1881 """ 1882 Globs for and returns a list of entry names matching a single 1883 pattern in this directory. 1884 1885 This searches any repositories and source directories for 1886 corresponding entries and returns a Node (or string) relative 1887 to the current directory if an entry is found anywhere. 1888 1889 TODO: handle pattern with no wildcard 1890 """ 1891 search_dir_list = self.get_all_rdirs() 1892 for srcdir in self.srcdir_list(): 1893 search_dir_list.extend(srcdir.get_all_rdirs()) 1894 1895 selfEntry = self.Entry 1896 names = [] 1897 for dir in search_dir_list: 1898 # We use the .name attribute from the Node because the keys of 1899 # the dir.entries dictionary are normalized (that is, all upper 1900 # case) on case-insensitive systems like Windows. 1901 #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ] 1902 entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys()) 1903 node_names = map(lambda n, e=dir.entries: e[n].name, entry_names) 1904 names.extend(node_names) 1905 if not strings: 1906 # Make sure the working directory (self) actually has 1907 # entries for all Nodes in repositories or variant dirs. 1908 map(selfEntry, node_names) 1909 if ondisk: 1910 try: 1911 disk_names = os.listdir(dir.abspath) 1912 except os.error: 1913 continue 1914 names.extend(disk_names) 1915 if not strings: 1916 # We're going to return corresponding Nodes in 1917 # the local directory, so we need to make sure 1918 # those Nodes exist. We only want to create 1919 # Nodes for the entries that will match the 1920 # specified pattern, though, which means we 1921 # need to filter the list here, even though 1922 # the overall list will also be filtered later, 1923 # after we exit this loop. 1924 if pattern[0] != '.': 1925 #disk_names = [ d for d in disk_names if d[0] != '.' ] 1926 disk_names = filter(lambda x: x[0] != '.', disk_names) 1927 disk_names = fnmatch.filter(disk_names, pattern) 1928 dirEntry = dir.Entry 1929 for name in disk_names: 1930 # Add './' before disk filename so that '#' at 1931 # beginning of filename isn't interpreted. 1932 name = './' + name 1933 node = dirEntry(name).disambiguate() 1934 n = selfEntry(name) 1935 if n.__class__ != node.__class__: 1936 n.__class__ = node.__class__ 1937 n._morph() 1938 1939 names = set(names) 1940 if pattern[0] != '.': 1941 #names = [ n for n in names if n[0] != '.' ] 1942 names = filter(lambda x: x[0] != '.', names) 1943 names = fnmatch.filter(names, pattern) 1944 1945 if strings: 1946 return names 1947 1948 #return [ self.entries[_my_normcase(n)] for n in names ] 1949 return map(lambda n, e=self.entries: e[_my_normcase(n)], names)
1950
1951 -class RootDir(Dir):
1952 """A class for the root directory of a file system. 1953 1954 This is the same as a Dir class, except that the path separator 1955 ('/' or '\\') is actually part of the name, so we don't need to 1956 add a separator when creating the path names of entries within 1957 this directory. 1958 """
1959 - def __init__(self, name, fs):
1960 if __debug__: logInstanceCreation(self, 'Node.FS.RootDir') 
1961 # We're going to be our own parent directory (".." entry and .dir 
1962 # attribute) so we have to set up some values so Base.__init__() 
1963 # won't gag when it calls some of our methods. 
1964 self.abspath = '' 
1965 self.labspath = '' 
1966 self.path = '' 
1967 self.tpath = '' 
1968 self.path_elements = [] 
1969 self.duplicate = 0 
1970 self.root = self 
1971 Base.__init__(self, name, self, fs) 
1972 
1973 # Now set our paths to what we really want them to be: the 
1974 # initial drive letter (the name) plus the directory separator, 
1975 # except for the "lookup abspath," which does not have the 
1976 # drive letter. 
1977 self.abspath = name + os.sep 
1978 self.labspath = '' 
1979 self.path = name + os.sep 
1980 self.tpath = name + os.sep 
1981 self._morph() 
1982 
1983 self._lookupDict = {} 
1984 
1985 # The // and os.sep + os.sep entries are necessary because 
1986 # os.path.normpath() seems to preserve double slashes at the 
1987 # beginning of a path (presumably for UNC path names), but 
1988 # collapses triple slashes to a single slash. 
1989 self._lookupDict[''] = self 
1990 self._lookupDict['/'] = self 
1991 self._lookupDict['//'] = self 
1992 self._lookupDict[os.sep] = self 
1993 self._lookupDict[os.sep + os.sep] = self
1994
1995 - def must_be_same(self, klass):
1996 if klass is Dir: 1997 return 1998 Base.must_be_same(self, klass)
1999
2000 - def _lookup_abs(self, p, klass, create=1):
2001 """ 2002 Fast (?) lookup of a *normalized* absolute path. 2003 2004 This method is intended for use by internal lookups with 2005 already-normalized path data. For general-purpose lookups, 2006 use the FS.Entry(), FS.Dir() or FS.File() methods. 2007 2008 The caller is responsible for making sure we're passed a 2009 normalized absolute path; we merely let Python's dictionary look 2010 up and return the One True Node.FS object for the path. 2011 2012 If no Node for the specified "p" doesn't already exist, and 2013 "create" is specified, the Node may be created after recursive 2014 invocation to find or create the parent directory or directories. 2015 """ 2016 k = _my_normcase(p) 2017 try: 2018 result = self._lookupDict[k] 2019 except KeyError: 2020 if not create: 2021 raise SCons.Errors.UserError 2022 # There is no Node for this path name, and we're allowed 2023 # to create it. 2024 dir_name, file_name = os.path.split(p) 2025 dir_node = self._lookup_abs(dir_name, Dir) 2026 result = klass(file_name, dir_node, self.fs) 2027 2028 # Double-check on disk (as configured) that the Node we 2029 # created matches whatever is out there in the real world. 2030 result.diskcheck_match() 2031 2032 self._lookupDict[k] = result 2033 dir_node.entries[_my_normcase(file_name)] = result 2034 dir_node.implicit = None 2035 else: 2036 # There is already a Node for this path name. Allow it to 2037 # complain if we were looking for an inappropriate type. 2038 result.must_be_same(klass) 2039 return result
2040
2041 - def __str__(self):
2042 return self.abspath
2043
2044 - def entry_abspath(self, name):
2045 return self.abspath + name
2046
2047 - def entry_labspath(self, name):
2048 return '/' + name
2049
2050 - def entry_path(self, name):
2051 return self.path + name
2052
2053 - def entry_tpath(self, name):
2054 return self.tpath + name
2055
2056 - def is_under(self, dir):
2057 if self is dir: 2058 return 1 2059 else: 2060 return 0
2061
2062 - def up(self):
2063 return None
2064
2065 - def get_dir(self):
2066 return None
2067
2068 - def src_builder(self):
2069 return _null
2070
2071 -class FileNodeInfo(SCons.Node.NodeInfoBase):
2072 current_version_id = 1 2073 2074 field_list = ['csig', 'timestamp', 'size'] 2075 2076 # This should get reset by the FS initialization. 2077 fs = None 2078
2079 - def str_to_node(self, s):
2080 top = self.fs.Top 2081 root = top.root 2082 if do_splitdrive: 2083 drive, s = os.path.splitdrive(s) 2084 if drive: 2085 root = self.fs.get_root(drive) 2086 if not os.path.isabs(s): 2087 s = top.labspath + '/' + s 2088 return root._lookup_abs(s, Entry)
2089
2090 -class FileBuildInfo(SCons.Node.BuildInfoBase):
2091 current_version_id = 1 2092
2093 - def convert_to_sconsign(self):
2094 """ 2095 Converts this FileBuildInfo object for writing to a .sconsign file 2096 2097 This replaces each Node in our various dependency lists with its 2098 usual string representation: relative to the top-level SConstruct 2099 directory, or an absolute path if it's outside. 2100 """ 2101 if os.sep == '/': 2102 node_to_str = str 2103 else: 2104 def node_to_str(n): 2105 try: 2106 s = n.path 2107 except AttributeError: 2108 s = str(n) 2109 else: 2110 s = string.replace(s, os.sep, '/') 2111 return s
2112 for attr in ['bsources', 'bdepends', 'bimplicit']: 2113 try: 2114 val = getattr(self, attr) 2115 except AttributeError: 2116 pass 2117 else: 2118 setattr(self, attr, map(node_to_str, val))
2119 - def convert_from_sconsign(self, dir, name):
2120 """ 2121 Converts a newly-read FileBuildInfo object for in-SCons use 2122 2123 For normal up-to-date checking, we don't have any conversion to 2124 perform--but we're leaving this method here to make that clear. 2125 """ 2126 pass
2127 - def prepare_dependencies(self):
2128 """ 2129 Prepares a FileBuildInfo object for explaining what changed 2130 2131 The bsources, bdepends and bimplicit lists have all been 2132 stored on disk as paths relative to the top-level SConstruct 2133 directory. Convert the strings to actual Nodes (for use by the 2134 --debug=explain code and --implicit-cache). 2135 """ 2136 attrs = [ 2137 ('bsources', 'bsourcesigs'), 2138 ('bdepends', 'bdependsigs'), 2139 ('bimplicit', 'bimplicitsigs'), 2140 ] 2141 for (nattr, sattr) in attrs: 2142 try: 2143 strings = getattr(self, nattr) 2144 nodeinfos = getattr(self, sattr) 2145 except AttributeError: 2146 continue 2147 nodes = [] 2148 for s, ni in izip(strings, nodeinfos): 2149 if not isinstance(s, SCons.Node.Node): 2150 s = ni.str_to_node(s) 2151 nodes.append(s) 2152 setattr(self, nattr, nodes)
2153 - def format(self, names=0):
2154 result = [] 2155 bkids = self.bsources + self.bdepends + self.bimplicit 2156 bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs 2157 for bkid, bkidsig in izip(bkids, bkidsigs): 2158 result.append(str(bkid) + ': ' + 2159 string.join(bkidsig.format(names=names), ' ')) 2160 result.append('%s [%s]' % (self.bactsig, self.bact)) 2161 return string.join(result, '\n')
2162
2163 -class File(Base):
2164 """A class for files in a file system. 2165 """ 2166 2167 memoizer_counters = [] 2168 2169 NodeInfo = FileNodeInfo 2170 BuildInfo = FileBuildInfo 2171 2172 md5_chunksize = 64 2173
2174 - def diskcheck_match(self):
2175 diskcheck_match(self, self.isdir, 2176 "Directory %s found where file expected.")
2177
2178 - def __init__(self, name, directory, fs):
2179 if __debug__: logInstanceCreation(self, 'Node.FS.File') 2180 Base.__init__(self, name, directory, fs) 2181 self._morph()
2182
2183 - def Entry(self, name):
2184 """Create an entry node named 'name' relative to 2185 the directory of this file.""" 2186 return self.dir.Entry(name)
2187
2188 - def Dir(self, name, create=True):
2189 """Create a directory node named 'name' relative to 2190 the directory of this file.""" 2191 return self.dir.Dir(name, create=create)
2192
2193 - def Dirs(self, pathlist):
2194 """Create a list of directories relative to the SConscript 2195 directory of this file.""" 2196 # TODO(1.5) 2197 # return [self.Dir(p) for p in pathlist] 2198 return map(lambda p, s=self: s.Dir(p), pathlist)
2199
2200 - def File(self, name):
2201 """Create a file node named 'name' relative to 2202 the directory of this file.""" 2203 return self.dir.File(name)
2204 2205 #def generate_build_dict(self): 2206 # """Return an appropriate dictionary of values for building 2207 # this File.""" 2208 # return {'Dir' : self.Dir, 2209 # 'File' : self.File, 2210 # 'RDirs' : self.RDirs} 2211
2212 - def _morph(self):
2213 """Turn a file system node into a File object.""" 2214 self.scanner_paths = {} 2215 if not hasattr(self, '_local'): 2216 self._local = 0 2217 2218 # If there was already a Builder set on this entry, then 2219 # we need to make sure we call the target-decider function, 2220 # not the source-decider. Reaching in and doing this by hand 2221 # is a little bogus. We'd prefer to handle this by adding 2222 # an Entry.builder_set() method that disambiguates like the 2223 # other methods, but that starts running into problems with the 2224 # fragile way we initialize Dir Nodes with their Mkdir builders, 2225 # yet still allow them to be overridden by the user. Since it's 2226 # not clear right now how to fix that, stick with what works 2227 # until it becomes clear... 2228 if self.has_builder(): 2229 self.changed_since_last_build = self.decide_target
2230
2231 - def scanner_key(self):
2232 return self.get_suffix()
2233
2234 - def get_contents(self):
2235 if not self.rexists(): 2236 return '' 2237 fname = self.rfile().abspath 2238 try: 2239 r = open(fname, "rb").read() 2240 except EnvironmentError, e: 2241 if not e.filename: 2242 e.filename = fname 2243 raise 2244 return r
2245
2246 - def get_content_hash(self):
2247 """ 2248 Compute and return the MD5 hash for this file. 2249 """ 2250 if not self.rexists(): 2251 return SCons.Util.MD5signature('') 2252 fname = self.rfile().abspath 2253 try: 2254 cs = SCons.Util.MD5filesignature(fname, 2255 chunksize=SCons.Node.FS.File.md5_chunksize*1024) 2256 except EnvironmentError, e: 2257 if not e.filename: 2258 e.filename = fname 2259 raise 2260 return cs
2261 2262 2263 memoizer_counters.append(SCons.Memoize.CountValue('get_size')) 2264
2265 - def get_size(self):
2266 try: 2267 return self._memo['get_size'] 2268 except KeyError: 2269 pass 2270 2271 if self.rexists(): 2272 size = self.rfile().getsize() 2273 else: 2274 size = 0 2275 2276 self._memo['get_size'] = size 2277 2278 return size
2279 2280 memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp')) 2281
2282 - def get_timestamp(self):
2283 try: 2284 return self._memo['get_timestamp'] 2285 except KeyError: 2286 pass 2287 2288 if self.rexists(): 2289 timestamp = self.rfile().getmtime() 2290 else: 2291 timestamp = 0 2292 2293 self._memo['get_timestamp'] = timestamp 2294 2295 return timestamp
2296
2297 - def store_info(self):
2298 # Merge our build information into the already-stored entry. 
2299 # This accommodates "chained builds" where a file that's a target 
2300 # in one build (SConstruct file) is a source in a different build. 
2301 # See test/chained-build.py for the use case. 
2302 if do_store_info: 
2303 self.dir.sconsign().store_info(self.name, self)
2304 2305 convert_copy_attrs = [ 2306 'bsources', 2307 'bimplicit', 2308 'bdepends', 2309 'bact', 2310 'bactsig', 2311 'ninfo', 2312 ] 2313 2314 2315 convert_sig_attrs = [ 2316 'bsourcesigs', 2317 'bimplicitsigs', 2318 'bdependsigs', 2319 ] 2320
2321 - def convert_old_entry(self, old_entry):
2322 # Convert a .sconsign entry from before the Big Signature 2323 # Refactoring, doing what we can to convert its information 2324 # to the new .sconsign entry format. 2325 # 2326 # The old format looked essentially like this: 2327 # 2328 # BuildInfo 2329 # .ninfo (NodeInfo) 2330 # .bsig 2331 # .csig 2332 # .timestamp 2333 # .size 2334 # .bsources 2335 # .bsourcesigs ("signature" list) 2336 # .bdepends 2337 # .bdependsigs ("signature" list) 2338 # .bimplicit 2339 # .bimplicitsigs ("signature" list) 2340 # .bact 2341 # .bactsig 2342 # 2343 # The new format looks like this: 2344 # 2345 # .ninfo (NodeInfo) 2346 # .bsig 2347 # .csig 2348 # .timestamp 2349 # .size 2350 # .binfo (BuildInfo) 2351 # .bsources 2352 # .bsourcesigs (NodeInfo list) 2353 # .bsig 2354 # .csig 2355 # .timestamp 2356 # .size 2357 # .bdepends 2358 # .bdependsigs (NodeInfo list) 2359 # .bsig 2360 # .csig 2361 # .timestamp 2362 # .size 2363 # .bimplicit 2364 # .bimplicitsigs (NodeInfo list) 2365 # .bsig 2366 # .csig 2367 # .timestamp 2368 # .size 2369 # .bact 2370 # .bactsig 2371 # 2372 # The basic idea of the new structure is that a NodeInfo always 2373 # holds all available information about the state of a given Node 2374 # at a certain point in time. The various .b*sigs lists can just 2375 # be a list of pointers to the .ninfo attributes of the different 2376 # dependent nodes, without any copying of information until it's 2377 # time to pickle it for writing out to a .sconsign file. 2378 # 2379 # The complicating issue is that the *old* format only stored one 2380 # "signature" per dependency, based on however the *last* build 2381 # was configured. We don't know from just looking at it whether 2382 # it was a build signature, a content signature, or a timestamp 2383 # "signature". Since we no longer use build signatures, the 2384 # best we can do is look at the length and if it's thirty two, 2385 # assume that it was (or might have been) a content signature. 2386 # If it was actually a build signature, then it will cause a 2387 # rebuild anyway when it doesn't match the new content signature, 2388 # but that's probably the best we can do. 2389 import SCons.SConsign 2390 new_entry = SCons.SConsign.SConsignEntry() 2391 new_entry.binfo = self.new_binfo() 2392 binfo = new_entry.binfo 2393 for attr in self.convert_copy_attrs: 2394 try: 2395 value = getattr(old_entry, attr) 2396 except AttributeError: 2397 continue 2398 setattr(binfo, attr, value) 2399 delattr(old_entry, attr) 2400 for attr in self.convert_sig_attrs: 2401 try: 2402 sig_list = getattr(old_entry, attr) 2403 except AttributeError: 2404 continue 2405 value = [] 2406 for sig in sig_list: 2407 ninfo = self.new_ninfo() 2408 if len(sig) == 32: 2409 ninfo.csig = sig 2410 else: 2411 ninfo.timestamp = sig 2412 value.append(ninfo) 2413 setattr(binfo, attr, value) 2414 delattr(old_entry, attr) 2415 return new_entry
2416 2417 memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info')) 2418
2419 - def get_stored_info(self):
2420 try: 2421 return self._memo['get_stored_info'] 2422 except KeyError: 2423 pass 2424 2425 try: 2426 sconsign_entry = self.dir.sconsign().get_entry(self.name) 2427 except (KeyError, EnvironmentError): 2428 import SCons.SConsign 2429 sconsign_entry = SCons.SConsign.SConsignEntry() 2430 sconsign_entry.binfo = self.new_binfo() 2431 sconsign_entry.ninfo = self.new_ninfo() 2432 else: 2433 if isinstance(sconsign_entry, FileBuildInfo): 2434 # This is a .sconsign file from before the Big Signature 2435 # Refactoring; convert it as best we can. 2436 sconsign_entry = self.convert_old_entry(sconsign_entry) 2437 try: 2438 delattr(sconsign_entry.ninfo, 'bsig') 2439 except AttributeError: 2440 pass 2441 2442 self._memo['get_stored_info'] = sconsign_entry 2443 2444 return sconsign_entry
2445
2446 - def get_stored_implicit(self):
2447 binfo = self.get_stored_info().binfo 2448 binfo.prepare_dependencies() 2449 try: return binfo.bimplicit 2450 except AttributeError: return None
2451
2452 - def rel_path(self, other):
2453 return self.dir.rel_path(other)
2454
2455 - def _get_found_includes_key(self, env, scanner, path):
2456 return (id(env), id(scanner), path)
2457 2458 memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key)) 2459
2460 - def get_found_includes(self, env, scanner, path):
2461 """Return the included implicit dependencies in this file. 2462 Cache results so we only scan the file once per path 2463 regardless of how many times this information is requested. 2464 """ 2465 memo_key = (id(env), id(scanner), path) 2466 try: 2467 memo_dict = self._memo['get_found_includes'] 2468 except KeyError: 2469 memo_dict = {} 2470 self._memo['get_found_includes'] = memo_dict 2471 else: 2472 try: 2473 return memo_dict[memo_key] 2474 except KeyError: 2475 pass 2476 2477 if scanner: 2478 # result = [n.disambiguate() for n in scanner(self, env, path)] 2479 result = scanner(self, env, path) 2480 result = map(lambda N: N.disambiguate(), result) 2481 else: 2482 result = [] 2483 2484 memo_dict[memo_key] = result 2485 2486 return result
2487
2488 - def _createDir(self):
2489 # ensure that the directories for this node are 2490 # created. 2491 self.dir._create()
2492
2493 - def retrieve_from_cache(self):
2494 """Try to retrieve the node's content from a cache 2495 2496 This method is called from multiple threads in a parallel build, 2497 so only do thread safe stuff here. Do thread unsafe stuff in 2498 built(). 2499 2500 Returns true iff the node was successfully retrieved. 2501 """ 2502 if self.nocache: 2503 return None 2504 if not self.is_derived(): 2505 return None 2506 return self.get_build_env().get_CacheDir().retrieve(self)
2507
2508 - def built(self):
2509 """ 2510 Called just after this node is successfully built. 2511 """ 2512 # Push this file out to cache before the superclass Node.built() 2513 # method has a chance to clear the build signature, which it 2514 # will do if this file has a source scanner. 2515 # 2516 # We have to clear the memoized values *before* we push it to 2517 # cache so that the memoization of the self.exists() return 2518 # value doesn't interfere. 2519 self.clear_memoized_values() 2520 if self.exists(): 2521 self.get_build_env().get_CacheDir().push(self) 2522 SCons.Node.Node.built(self)
2523
2524 - def visited(self):
2525 if self.exists(): 2526 self.get_build_env().get_CacheDir().push_if_forced(self) 2527 2528 ninfo = self.get_ninfo() 2529 2530 csig = self.get_max_drift_csig() 2531 if csig: 2532 ninfo.csig = csig 2533 2534 ninfo.timestamp = self.get_timestamp() 2535 ninfo.size = self.get_size() 2536 2537 if not self.has_builder(): 2538 # This is a source file, but it might have been a target file 2539 # in another build that included more of the DAG. Copy 2540 # any build information that's stored in the .sconsign file 2541 # into our binfo object so it doesn't get lost. 2542 old = self.get_stored_info() 2543 self.get_binfo().__dict__.update(old.binfo.__dict__) 2544 2545 self.store_info()
2546
2547 - def find_src_builder(self):
2548 if self.rexists(): 2549 return None 2550 scb = self.dir.src_builder() 2551 if scb is _null: 2552 if diskcheck_sccs(self.dir, self.name): 2553 scb = get_DefaultSCCSBuilder() 2554 elif diskcheck_rcs(self.dir, self.name): 2555 scb = get_DefaultRCSBuilder() 2556 else: 2557 scb = None 2558 if scb is not None: 2559 try: 2560 b = self.builder 2561 except AttributeError: 2562 b = None 2563 if b is None: 2564 self.builder_set(scb) 2565 return scb
2566
2567 - def has_src_builder(self):
2568 """Return whether this Node has a source builder or not. 2569 2570 If this Node doesn't have an explicit source code builder, this 2571 is where we figure out, on the fly, if there's a transparent 2572 source code builder for it. 2573 2574 Note that if we found a source builder, we also set the 2575 self.builder attribute, so that all of the methods that actually 2576 *build* this file don't have to do anything different. 2577 """ 2578 try: 2579 scb = self.sbuilder 2580 except AttributeError: 2581 scb = self.sbuilder = self.find_src_builder() 2582 return scb is not None
2583
2584 - def alter_targets(self):
2585 """Return any corresponding targets in a variant directory. 2586 """ 2587 if self.is_derived(): 2588 return [], None 2589 return self.fs.variant_dir_target_climb(self, self.dir, [self.name])
2590
2591 - def _rmv_existing(self):
2592 self.clear_memoized_values() 2593 e = Unlink(self, [], None) 2594 if isinstance(e, SCons.Errors.BuildError): 2595 raise e
2596 2597 # 2598 # Taskmaster interface subsystem 2599 # 2600
2601 - def make_ready(self):
2602 self.has_src_builder() 2603 self.get_binfo()
2604
2605 - def prepare(self):
2606 """Prepare for this file to be created.""" 2607 SCons.Node.Node.prepare(self) 2608 2609 if self.get_state() != SCons.Node.up_to_date: 2610 if self.exists(): 2611 if self.is_derived() and not self.precious: 2612 self._rmv_existing() 2613 else: 2614 try: 2615 self._createDir() 2616 except SCons.Errors.StopError, drive: 2617 desc = "No drive `%s' for target `%s'." % (drive, self) 2618 raise SCons.Errors.StopError, desc
2619 2620 # 2621 # 2622 # 2623
2624 - def remove(self):
2625 """Remove this file.""" 2626 if self.exists() or self.islink(): 2627 self.fs.unlink(self.path) 2628 return 1 2629 return None
2630
2631 - def do_duplicate(self, src):
2632 self._createDir() 2633 Unlink(self, None, None) 2634 e = Link(self, src, None) 2635 if isinstance(e, SCons.Errors.BuildError): 2636 desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr) 2637 raise SCons.Errors.StopError, desc 2638 self.linked = 1 2639 # The Link() action may or may not have actually 2640 # created the file, depending on whether the -n 2641 # option was used or not. Delete the _exists and 2642 # _rexists attributes so they can be reevaluated. 2643 self.clear()
2644 2645 memoizer_counters.append(SCons.Memoize.CountValue('exists')) 2646
2647 - def exists(self):
2648 try: 2649 return self._memo['exists'] 2650 except KeyError: 2651 pass 2652 # Duplicate from source path if we are set up to do this. 2653 if self.duplicate and not self.is_derived() and not self.linked: 2654 src = self.srcnode() 2655 if src is not self: 2656 # At this point, src is meant to be copied in a variant directory. 2657 src = src.rfile() 2658 if src.abspath != self.abspath: 2659 if src.exists(): 2660 self.do_duplicate(src) 2661 # Can't return 1 here because the duplication might 2662 # not actually occur if the -n option is being used. 2663 else: 2664 # The source file does not exist. Make sure no old 2665 # copy remains in the variant directory. 2666 if Base.exists(self) or self.islink(): 2667 self.fs.unlink(self.path) 2668 # Return None explicitly because the Base.exists() call 2669 # above will have cached its value if the file existed. 2670 self._memo['exists'] = None 2671 return None 2672 result = Base.exists(self) 2673 self._memo['exists'] = result 2674 return result
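Whether a node duplicates its source file here is controlled at the SConscript level; a hedged example with illustrative directory names:

    # Hypothetical SConstruct fragment: build in 'build/' and physically
    # duplicate sources into it, so File.exists() above will copy them in.
    VariantDir('build', 'src', duplicate=1)
    SConscript('build/SConscript')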
2675 2676 # 2677 # SIGNATURE SUBSYSTEM 2678 # 2679
2680 - def get_max_drift_csig(self):
2681 """ 2682 Returns the content signature currently stored for this node 2683 if it's been unmodified longer than the max_drift value, or the 2684 max_drift value is 0. Returns None otherwise. 2685 """ 2686 old = self.get_stored_info() 2687 mtime = self.get_timestamp() 2688 2689 max_drift = self.fs.max_drift 2690 if max_drift > 0: 2691 if (time.time() - mtime) > max_drift: 2692 try: 2693 n = old.ninfo 2694 if n.timestamp and n.csig and n.timestamp == mtime: 2695 return n.csig 2696 except AttributeError: 2697 pass 2698 elif max_drift == 0: 2699 try: 2700 return old.ninfo.csig 2701 except AttributeError: 2702 pass 2703 2704 return None
2705
2706 - def get_csig(self):
2707 """ 2708 Generate a node's content signature, the digested signature 2709 of its content. 2710 2711 node - the node 2712 cache - alternate node to use for the signature cache 2713 returns - the content signature 2714 """ 2715 ninfo = self.get_ninfo() 2716 try: 2717 return ninfo.csig 2718 except AttributeError: 2719 pass 2720 2721 csig = self.get_max_drift_csig() 2722 if csig is None: 2723 2724 try: 2725 if self.get_size() < SCons.Node.FS.File.md5_chunksize: 2726 contents = self.get_contents() 2727 else: 2728 csig = self.get_content_hash() 2729 except IOError: 2730 # This can happen if there's actually a directory on-disk, 2731 # which can be the case if they've disabled disk checks, 2732 # or if an action with a File target actually happens to 2733 # create a same-named directory by mistake. 2734 csig = '' 2735 else: 2736 if not csig: 2737 csig = SCons.Util.MD5signature(contents) 2738 2739 ninfo.csig = csig 2740 2741 return csig
2742 2743 # 2744 # DECISION SUBSYSTEM 2745 # 2746
2747 - def builder_set(self, builder):
2748 SCons.Node.Node.builder_set(self, builder) 2749 self.changed_since_last_build = self.decide_target
2750
2751 - def changed_content(self, target, prev_ni):
2752 cur_csig = self.get_csig() 2753 try: 2754 return cur_csig != prev_ni.csig 2755 except AttributeError: 2756 return 1
2757
2758 - def changed_state(self, target, prev_ni):
2759 return self.state != SCons.Node.up_to_date
2760
2761 - def changed_timestamp_then_content(self, target, prev_ni):
2762 if not self.changed_timestamp_match(target, prev_ni): 2763 try: 2764 self.get_ninfo().csig = prev_ni.csig 2765 except AttributeError: 2766 pass 2767 return False 2768 return self.changed_content(target, prev_ni)
2769
2770 - def changed_timestamp_newer(self, target, prev_ni):
2771 try: 2772 return self.get_timestamp() > target.get_timestamp() 2773 except AttributeError: 2774 return 1
2775
2776 - def changed_timestamp_match(self, target, prev_ni):
2777 try: 2778 return self.get_timestamp() != prev_ni.timestamp 2779 except AttributeError: 2780 return 1
2781
2782 - def decide_source(self, target, prev_ni):
2783 return target.get_build_env().decide_source(self, target, prev_ni)
2784
2785 - def decide_target(self, target, prev_ni):
2786 return target.get_build_env().decide_target(self, target, prev_ni)
2787 2788 # Initialize this Node's decider function to decide_source() because 2789 # every file is a source file until it has a Builder attached... 2790 changed_since_last_build = decide_source 2791
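These decision methods back the user-visible Decider() interface; a hedged SConscript-level example of selecting between them, using the decider names from the standard SCons documentation:

    # Hypothetical SConstruct fragment.
    env = Environment()
    env.Decider('timestamp-match')     # uses changed_timestamp_match()
    # env.Decider('MD5-timestamp')     # uses changed_timestamp_then_content()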
2792 - def is_up_to_date(self):
2793 T = 0 2794 if T: Trace('is_up_to_date(%s):' % self) 2795 if not self.exists(): 2796 if T: Trace(' not self.exists():') 2797 # The file doesn't exist locally... 2798 r = self.rfile() 2799 if r != self: 2800 # ...but there is one in a Repository... 2801 if not self.changed(r): 2802 if T: Trace(' changed(%s):' % r) 2803 # ...and it's even up-to-date... 2804 if self._local: 2805 # ...and they'd like a local copy. 2806 e = LocalCopy(self, r, None) 2807 if isinstance(e, SCons.Errors.BuildError): 2808 raise 2809 self.store_info() 2810 if T: Trace(' 1\n') 2811 return 1 2812 self.changed() 2813 if T: Trace(' None\n') 2814 return None 2815 else: 2816 r = self.changed() 2817 if T: Trace(' self.exists(): %s\n' % r) 2818 return not r
2819 2820 memoizer_counters.append(SCons.Memoize.CountValue('rfile')) 2821
2822 - def rfile(self):
2823 try: 2824 return self._memo['rfile'] 2825 except KeyError: 2826 pass 2827 result = self 2828 if not self.exists(): 2829 norm_name = _my_normcase(self.name) 2830 for dir in self.dir.get_all_rdirs(): 2831 try: node = dir.entries[norm_name] 2832 except KeyError: node = dir.file_on_disk(self.name) 2833 if node and node.exists() and \ 2834 (isinstance(node, File) or isinstance(node, Entry) \ 2835 or not node.is_derived()): 2836 result = node 2837 break 2838 self._memo['rfile'] = result 2839 return result
2840
2841 - def rstr(self):
2842 return str(self.rfile())
2843
2844 - def get_cachedir_csig(self):
2845 """ 2846 Fetch a Node's content signature for purposes of computing 2847 another Node's cachesig. 2848 2849 This is a wrapper around the normal get_csig() method that handles 2850 the somewhat obscure case of using CacheDir with the -n option. 2851 Any files that don't exist would normally be "built" by fetching 2852 them from the cache, but the normal get_csig() method will try 2853 to open up the local file, which doesn't exist because the -n 2854 option meant we didn't actually pull the file from cachedir. 2855 But since the file *does* actually exist in the cachedir, we 2856 can use its contents for the csig. 2857 """ 2858 try: 2859 return self.cachedir_csig 2860 except AttributeError: 2861 pass 2862 2863 cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self) 2864 if not self.exists() and cachefile and os.path.exists(cachefile): 2865 self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \ 2866 SCons.Node.FS.File.md5_chunksize * 1024) 2867 else: 2868 self.cachedir_csig = self.get_csig() 2869 return self.cachedir_csig
2870
2871 - def get_cachedir_bsig(self):
2872 try: 2873 return self.cachesig 2874 except AttributeError: 2875 pass 2876 2877 # Add the path to the cache signature, because multiple 2878 # targets built by the same action will all have the same 2879 # build signature, and we have to differentiate them somehow. 2880 children = self.children() 2881 executor = self.get_executor() 2882 # sigs = [n.get_cachedir_csig() for n in children] 2883 sigs = map(lambda n: n.get_cachedir_csig(), children) 2884 sigs.append(SCons.Util.MD5signature(executor.get_contents())) 2885 sigs.append(self.path) 2886 result = self.cachesig = SCons.Util.MD5collect(sigs) 2887 return result
2888 2889 2890 default_fs = None 2891
2892 -def get_default_fs():
2893 global default_fs 2894 if not default_fs: 2895 default_fs = FS() 2896 return default_fs
2897
2898 -class FileFinder:
2899 """ 2900 """ 2901 if SCons.Memoize.use_memoizer: 2902 __metaclass__ = SCons.Memoize.Memoized_Metaclass 2903 2904 memoizer_counters = [] 2905
2906 - def __init__(self):
2907 self._memo = {}
2908
2909 - def filedir_lookup(self, p, fd=None):
2910 """ 2911 A helper method for find_file() that looks up a directory for 2912 a file we're trying to find. This only creates the Dir Node if 2913 it exists on-disk, since if the directory doesn't exist we know 2914 we won't find any files in it... :-) 2915 2916 It would be more compact to just use this as a nested function 2917 with a default keyword argument (see the commented-out version 2918 below), but that doesn't work unless you have nested scopes, 2919 so we define it here just so this work under Python 1.5.2. 2920 """ 2921 if fd is None: 2922 fd = self.default_filedir 2923 dir, name = os.path.split(fd) 2924 drive, d = os.path.splitdrive(dir) 2925 if d in ('/', os.sep): 2926 return p.fs.get_root(drive).dir_on_disk(name) 2927 if dir: 2928 p = self.filedir_lookup(p, dir) 2929 if not p: 2930 return None 2931 norm_name = _my_normcase(name) 2932 try: 2933 node = p.entries[norm_name] 2934 except KeyError: 2935 return p.dir_on_disk(name) 2936 if isinstance(node, Dir): 2937 return node 2938 if isinstance(node, Entry): 2939 node.must_be_same(Dir) 2940 return node 2941 return None
2942
2943 - def _find_file_key(self, filename, paths, verbose=None):
2944 return (filename, paths)
2945 2946 memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key)) 2947
2948 - def find_file(self, filename, paths, verbose=None):
2949 """ 2950 find_file(str, [Dir()]) -> [nodes] 2951 2952 filename - a filename to find 2953 paths - a list of directory path *nodes* to search in. Can be 2954 represented as a list, a tuple, or a callable that is 2955 called with no arguments and returns the list or tuple. 2956 2957 returns - the node created from the found file. 2958 2959 Find a node corresponding to either a derived file or a file 2960 that exists already. 2961 2962 Only the first file found is returned, and none is returned 2963 if no file is found. 2964 """ 2965 memo_key = self._find_file_key(filename, paths) 2966 try: 2967 memo_dict = self._memo['find_file'] 2968 except KeyError: 2969 memo_dict = {} 2970 self._memo['find_file'] = memo_dict 2971 else: 2972 try: 2973 return memo_dict[memo_key] 2974 except KeyError: 2975 pass 2976 2977 if verbose and not callable(verbose): 2978 if not SCons.Util.is_String(verbose): 2979 verbose = "find_file" 2980 verbose = ' %s: ' % verbose 2981 verbose = lambda s, v=verbose: sys.stdout.write(v + s) 2982 2983 filedir, filename = os.path.split(filename) 2984 if filedir: 2985 # More compact code that we can't use until we drop 2986 # support for Python 1.5.2: 2987 # 2988 #def filedir_lookup(p, fd=filedir): 2989 # """ 2990 # A helper function that looks up a directory for a file 2991 # we're trying to find. This only creates the Dir Node 2992 # if it exists on-disk, since if the directory doesn't 2993 # exist we know we won't find any files in it... :-) 2994 # """ 2995 # dir, name = os.path.split(fd) 2996 # if dir: 2997 # p = filedir_lookup(p, dir) 2998 # if not p: 2999 # return None 3000 # norm_name = _my_normcase(name) 3001 # try: 3002 # node = p.entries[norm_name] 3003 # except KeyError: 3004 # return p.dir_on_disk(name) 3005 # if isinstance(node, Dir): 3006 # return node 3007 # if isinstance(node, Entry): 3008 # node.must_be_same(Dir) 3009 # return node 3010 # if isinstance(node, Dir) or isinstance(node, Entry): 3011 # return node 3012 # return None 3013 #paths = filter(None, map(filedir_lookup, paths)) 3014 3015 self.default_filedir = filedir 3016 paths = filter(None, map(self.filedir_lookup, paths)) 3017 3018 result = None 3019 for dir in paths: 3020 if verbose: 3021 verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) 3022 node, d = dir.srcdir_find_file(filename) 3023 if node: 3024 if verbose: 3025 verbose("... FOUND '%s' in '%s'\n" % (filename, d)) 3026 result = node 3027 break 3028 3029 memo_dict[memo_key] = result 3030 3031 return result
3032 3033 find_file = FileFinder().find_file 3034 3035
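A hedged sketch of calling the module-level find_file() helper bound just above; the directory and file names are illustrative:

    # Hypothetical usage: search two include directories for a header.
    fs = get_default_fs()
    dirs = [fs.Dir('include'), fs.Dir('src/include')]
    node = find_file('config.h', dirs)   # a File Node, or None if not found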
3036 -def invalidate_node_memos(targets):
3037 """ 3038 Invalidate the memoized values of all Nodes (files or directories) 3039 that are associated with the given entries. Has been added to 3040 clear the cache of nodes affected by a direct execution of an 3041 action (e.g. Delete/Copy/Chmod). Existing Node caches become 3042 inconsistent if the action is run through Execute(). The argument 3043 `targets` can be a single Node object or filename, or a sequence 3044 of Nodes/filenames. 3045 """ 3046 from traceback import extract_stack 3047 3048 # First check if the cache really needs to be flushed. Only 3049 # actions run in the SConscript with Execute() seem to be 3050 # affected. XXX The way to check if Execute() is in the stacktrace 3051 # is a very dirty hack and should be replaced by a more sensible 3052 # solution. 3053 for f in extract_stack(): 3054 if f[2] == 'Execute' and f[0][-14:] == 'Environment.py': 3055 break 3056 else: 3057 # Dont have to invalidate, so return 3058 return 3059 3060 if not SCons.Util.is_List(targets): 3061 targets = [targets] 3062 3063 for entry in targets: 3064 # If the target is a Node object, clear the cache. If it is a 3065 # filename, look up potentially existing Node object first. 3066 try: 3067 entry.clear_memoized_values() 3068 except AttributeError: 3069 # Not a Node object, try to look up Node by filename. XXX 3070 # This creates Node objects even for those filenames which 3071 # do not correspond to an existing Node object. 3072 node = get_default_fs().Entry(entry) 3073 if node: 3074 node.clear_memoized_values()
3075