buildtools: Work around a '.' being in the target name when building python3 helpers
diff --git a/buildtools/wafsamba/samba_deps.py b/buildtools/wafsamba/samba_deps.py
index a3910b0d284dd6edd389e03a305bf128d240e8f6..978a5e9afe6f28d3c8129c2910cfb02b0ac4d223 100644
@@ -1,8 +1,14 @@
 # Samba automatic dependency handling and project rules
 
-import Build, os, re, Environment
-from samba_utils import *
-from samba_autoconf import *
+import os, sys, re, time
+
+import Build, Environment, Options, Logs, Utils
+from Logs import debug
+from Configure import conf
+
+from samba_bundled import BUILTIN_LIBRARY
+from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list, os_path_relpath
+from samba_autoconf import library_flags
 
 @conf
 def ADD_GLOBAL_DEPENDENCY(ctx, dep):
@@ -12,50 +18,55 @@ def ADD_GLOBAL_DEPENDENCY(ctx, dep):
     ctx.env.GLOBAL_DEPENDENCIES.append(dep)
 
 
-def TARGET_ALIAS(bld, target, alias):
-    '''define an alias for a target name'''
-    cache = LOCAL_CACHE(bld, 'TARGET_ALIAS')
-    if alias in cache:
-        print("Target alias %s already set to %s : newalias %s" % (alias, cache[alias], target))
-        raise
-    cache[alias] = target
-Build.BuildContext.TARGET_ALIAS = TARGET_ALIAS
+@conf
+def BREAK_CIRCULAR_LIBRARY_DEPENDENCIES(ctx):
+    '''indicate that circular dependencies between libraries should be broken.'''
+    ctx.env.ALLOW_CIRCULAR_LIB_DEPENDENCIES = True
 
 
-def EXPAND_ALIAS(bld, target):
-    '''expand a target name via an alias'''
-    aliases = LOCAL_CACHE(bld, 'TARGET_ALIAS')
-    if target in aliases:
-        return aliases[target]
-    return target
-Build.BuildContext.EXPAND_ALIAS = EXPAND_ALIAS
+@conf
+def SET_SYSLIB_DEPS(conf, target, deps):
+    '''set up some implied dependencies for a SYSLIB'''
+    cache = LOCAL_CACHE(conf, 'SYSLIB_DEPS')
+    cache[target] = deps
 
 
 def expand_subsystem_deps(bld):
     '''expand the reverse dependencies resulting from subsystem
-       attributes of modules'''
-    subsystems = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
-    aliases    = LOCAL_CACHE(bld, 'TARGET_ALIAS')
+       attributes of modules. This walks over the complete list of
+       declared subsystems and expands the samba_deps_extended list for any
+       module<->subsystem dependencies'''
+
+    subsystem_list = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
     targets    = LOCAL_CACHE(bld, 'TARGET_TYPE')
 
-    for s in subsystems:
-        if s in aliases:
-            s = aliases[s]
-        bld.ASSERT(s in targets, "Subsystem target %s not declared" % s)
-        type = targets[s]
+    for subsystem_name in subsystem_list:
+        bld.ASSERT(subsystem_name in targets, "Subsystem target %s not declared" % subsystem_name)
+        type = targets[subsystem_name]
         if type == 'DISABLED' or type == 'EMPTY':
             continue
 
-        t = bld.name_to_obj(s, bld.env)
-        bld.ASSERT(t is not None, "Subsystem target %s not found" % s)
-        for d in subsystems[s]:
-            type = targets[d['TARGET']]
-            if type != 'DISABLED' and type != 'EMPTY':
-                t.samba_deps_extended.append(d['TARGET'])
-                t2 = bld.name_to_obj(d['TARGET'], bld.env)
-                t2.samba_includes_extended.extend(t.samba_includes_extended)
-                t2.samba_deps_extended.extend(t.samba_deps_extended)
-        t.samba_deps_extended = unique_list(t.samba_deps_extended)
+        # for example,
+        #    subsystem_name = dcerpc_server (a subsystem)
+        #    subsystem      = dcerpc_server (a subsystem object)
+        #    module_name    = rpc_epmapper (a module within the dcerpc_server subsystem)
+        #    module         = rpc_epmapper (a module object within the dcerpc_server subsystem)
+
+        subsystem = bld.get_tgen_by_name(subsystem_name)
+        bld.ASSERT(subsystem is not None, "Unable to find subsystem %s" % subsystem_name)
+        for d in subsystem_list[subsystem_name]:
+            module_name = d['TARGET']
+            module_type = targets[module_name]
+            if module_type in ['DISABLED', 'EMPTY']:
+                continue
+            bld.ASSERT(subsystem is not None,
+                       "Subsystem target %s for %s (%s) not found" % (subsystem_name, module_name, module_type))
+            if module_type in ['SUBSYSTEM']:
+                # if a module is a plain object type (not a library) then the
+                # subsystem it is part of needs to have it as a dependency, so targets
+                # that depend on this subsystem get the modules of that subsystem
+                subsystem.samba_deps_extended.append(module_name)
+        subsystem.samba_deps_extended = unique_list(subsystem.samba_deps_extended)
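
For reference, a minimal standalone sketch of this expansion, with plain dicts standing in for the INIT_FUNCTIONS/TARGET_TYPE caches and the task-generator attributes (the target and init-function names are only illustrative):

# Sketch only: not part of the patch.
init_functions = {
    'dcerpc_server': [{'TARGET': 'rpc_epmapper', 'INIT_FUNCTION': 'rpc_epmapper_init'}],
}
target_type = {'dcerpc_server': 'SUBSYSTEM', 'rpc_epmapper': 'SUBSYSTEM'}
samba_deps_extended = {'dcerpc_server': []}

for subsystem_name, modules in init_functions.items():
    if target_type[subsystem_name] in ('DISABLED', 'EMPTY'):
        continue
    for d in modules:
        module_name = d['TARGET']
        if target_type[module_name] in ('DISABLED', 'EMPTY'):
            continue
        if target_type[module_name] == 'SUBSYSTEM':
            # plain object modules become deps of their subsystem, so anything
            # linking the subsystem also pulls in its modules
            samba_deps_extended[subsystem_name].append(module_name)

print(samba_deps_extended)   # {'dcerpc_server': ['rpc_epmapper']}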
 
 
 
@@ -66,26 +77,44 @@ def build_dependencies(self):
     the full dependency list for a target until we have all of the targets declared.
     '''
 
-    # we only should add extra library and object deps on libraries and binaries
-    if not self.samba_type in ['LIBRARY', 'BINARY', 'PYTHON']:
-        return
+    if self.samba_type in ['LIBRARY', 'BINARY', 'PYTHON']:
+        self.uselib        = list(self.final_syslibs)
+        self.uselib_local  = list(self.final_libs)
+        self.add_objects   = list(self.final_objects)
+
+        # extra link flags from pkg_config
+        libs = self.final_syslibs.copy()
+
+        (ccflags, ldflags, cpppath) = library_flags(self, list(libs))
+        new_ldflags        = getattr(self, 'samba_ldflags', [])[:]
+        new_ldflags.extend(ldflags)
+        self.ldflags       = new_ldflags
 
-    # we need to link against:
+        if getattr(self, 'allow_undefined_symbols', False) and self.env.undefined_ldflags:
+            for f in self.env.undefined_ldflags:
+                self.ldflags.remove(f)
 
-    #  1) any direct system libs
-    #  2) any indirect system libs that come from subsystem dependencies
-    #  3) any direct local libs
-    #  4) any indirect local libs that come from subsystem dependencies
-    #  5) any direct objects
-    #  6) any indirect objects that come from subsystem dependencies
+        if getattr(self, 'allow_undefined_symbols', False) and self.env.undefined_ignore_ldflags:
+            for f in self.env.undefined_ignore_ldflags:
+                self.ldflags.append(f)
 
-    self.uselib        = list(self.final_syslibs)
-    self.uselib_local  = list(self.final_libs)
-    self.add_objects   = list(self.final_objects)
+        debug('deps: computed dependencies for target %s: uselib=%s uselib_local=%s add_objects=%s',
+              self.sname, self.uselib, self.uselib_local, self.add_objects)
 
-    debug('deps: computed dependencies for target %s: uselib=%s uselib_local=%s add_objects=%s',
-          self.sname, self.uselib, self.uselib_local, self.add_objects)
+    if self.samba_type in ['SUBSYSTEM']:
+        # this is needed for the ccflags of libs that come from pkg_config
+        self.uselib = list(self.final_syslibs)
+        self.uselib.extend(list(self.direct_syslibs))
+        for lib in self.final_libs:
+            t = self.bld.get_tgen_by_name(lib)
+            self.uselib.extend(list(t.final_syslibs))
+        self.uselib = unique_list(self.uselib)
 
+    if getattr(self, 'uselib', None):
+        up_list = []
+        for l in self.uselib:
+            up_list.append(l.upper())
+        self.uselib = up_list
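
A rough standalone illustration of the flag handling above; library_flags() normally lives in samba_autoconf and queries pkg-config, so it is stubbed here, and all flag values are invented:

# Sketch only: not part of the patch.
def library_flags(libs):
    return ([], ['-lgnutls'], [])               # (ccflags, ldflags, cpppath)

env_undefined_ldflags = ['-Wl,-no-undefined']   # assumed configure result

samba_ldflags = ['-Wl,-no-undefined']
final_syslibs = {'gnutls'}
allow_undefined_symbols = True

ccflags, ldflags, cpppath = library_flags(sorted(final_syslibs))
link_flags = samba_ldflags[:] + ldflags
if allow_undefined_symbols:
    for f in env_undefined_ldflags:
        if f in link_flags:
            link_flags.remove(f)

uselib = [l.upper() for l in sorted(final_syslibs)]   # waf uselib names are upper case
print(link_flags, uselib)                             # ['-lgnutls'] ['GNUTLS']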
 
 
 def build_includes(self):
@@ -107,17 +136,17 @@ def build_includes(self):
 
     bld = self.bld
 
-    inc_deps = self.includes_objects
+    inc_deps = includes_objects(bld, self, set(), {})
 
     includes = []
 
     # maybe add local includes
-    if getattr(self, 'local_include', True) == True and getattr(self, 'local_include_first', True):
+    if getattr(self, 'local_include', True) and getattr(self, 'local_include_first', True):
         includes.append('.')
 
     includes.extend(self.samba_includes_extended)
 
-    if 'EXTRA_INCLUDES' in bld.env:
+    if 'EXTRA_INCLUDES' in bld.env and getattr(self, 'global_include', True):
         includes.extend(bld.env['EXTRA_INCLUDES'])
 
     includes.append('#')
@@ -126,10 +155,10 @@ def build_includes(self):
     inc_abs = []
 
     for d in inc_deps:
-        t = bld.name_to_obj(d, bld.env)
+        t = bld.get_tgen_by_name(d)
         bld.ASSERT(t is not None, "Unable to find dependency %s for %s" % (d, self.sname))
-        inclist = getattr(t, 'samba_includes_extended', [])
-        if getattr(t, 'local_include', True) == True:
+        inclist = getattr(t, 'samba_includes_extended', [])[:]
+        if getattr(t, 'local_include', True):
             inclist.append('.')
         if inclist == []:
             continue
@@ -145,7 +174,7 @@ def build_includes(self):
         relpath = os_path_relpath(inc, mypath)
         includes.append(relpath)
 
-    if getattr(self, 'local_include', True) == True and not getattr(self, 'local_include_first', True):
+    if getattr(self, 'local_include', True) and not getattr(self, 'local_include_first', True):
         includes.append('.')
 
     # now transform the includes list to be relative to the top directory
@@ -166,8 +195,6 @@ def build_includes(self):
           self.sname, self.includes)
 
 
-
-
 def add_init_functions(self):
     '''This builds the right set of init functions'''
 
@@ -192,70 +219,132 @@ def add_init_functions(self):
     if m is not None:
         modules.append(m)
 
-    if modules == []:
+    if 'pyembed' in self.features:
         return
 
-    sentinal = getattr(self, 'init_function_sentinal', 'NULL')
+    sentinel = getattr(self, 'init_function_sentinel', 'NULL')
 
+    targets    = LOCAL_CACHE(bld, 'TARGET_TYPE')
     cflags = getattr(self, 'samba_cflags', [])[:]
+
+    if modules == []:
+        sname = sname.replace('-','_')
+        sname = sname.replace('.','_')
+        sname = sname.replace('/','_')
+        cflags.append('-DSTATIC_%s_MODULES=%s' % (sname, sentinel))
+        if sentinel == 'NULL':
+            proto = "extern void __%s_dummy_module_proto(void)" % (sname)
+            cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto))
+        self.ccflags = cflags
+        return
+
     for m in modules:
         bld.ASSERT(m in subsystems,
                    "No init_function defined for module '%s' in target '%s'" % (m, self.sname))
         init_fn_list = []
         for d in subsystems[m]:
-            init_fn_list.append(d['INIT_FUNCTION'])
-        cflags.append('-DSTATIC_%s_MODULES=%s' % (m, ','.join(init_fn_list) + ',' + sentinal))
+            if targets[d['TARGET']] != 'DISABLED':
+                init_fn_list.append(d['INIT_FUNCTION'])
+        if init_fn_list == []:
+            cflags.append('-DSTATIC_%s_MODULES=%s' % (m, sentinel))
+            if sentinel == 'NULL':
+                proto = "extern void __%s_dummy_module_proto(void)" % (m)
+                cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto))
+        else:
+            cflags.append('-DSTATIC_%s_MODULES=%s' % (m, ','.join(init_fn_list) + ',' + sentinel))
+            proto=''
+            for f in init_fn_list:
+                proto += '_MODULE_PROTO(%s)' % f
+            proto += "extern void __%s_dummy_module_proto(void)" % (m)
+            cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto))
     self.ccflags = cflags
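
A standalone sketch of the -DSTATIC_<name>_MODULES generation above. The target names are hypothetical, and the sanitisation is applied unconditionally here for brevity, but it shows why '.', '-' and '/' must be mapped to '_' before a target name is used in a C macro (the python3 helper case from the subject line):

# Sketch only: not part of the patch.
def static_module_cflags(sname, init_fns, sentinel='NULL'):
    # a target name like 'python3.helper' is not a valid C identifier
    sname = sname.replace('-', '_').replace('.', '_').replace('/', '_')
    cflags = []
    if not init_fns:
        cflags.append('-DSTATIC_%s_MODULES=%s' % (sname, sentinel))
        proto = 'extern void __%s_dummy_module_proto(void)' % sname
        cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto))
        return cflags
    cflags.append('-DSTATIC_%s_MODULES=%s' % (sname, ','.join(init_fns) + ',' + sentinel))
    proto = ''.join('_MODULE_PROTO(%s)' % f for f in init_fns)
    proto += 'extern void __%s_dummy_module_proto(void)' % sname
    cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto))
    return cflags

print(static_module_cflags('python3.helper', []))
print(static_module_cflags('auth', ['auth_sam_init', 'auth_unix_init']))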
 
 
-
 def check_duplicate_sources(bld, tgt_list):
-    '''see if we are compiling the same source file into multiple
-    subsystem targets for the same library or binary'''
+    '''see if we are compiling the same source file more than once'''
 
     debug('deps: checking for duplicate sources')
-
     targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
 
+    for t in tgt_list:
+        source_list = TO_LIST(getattr(t, 'source', ''))
+        tpath = os.path.normpath(os_path_relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default'))
+        obj_sources = set()
+        for s in source_list:
+            p = os.path.normpath(os.path.join(tpath, s))
+            if p in obj_sources:
+                Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname))
+                sys.exit(1)
+            obj_sources.add(p)
+        t.samba_source_set = obj_sources
+
+    subsystems = {}
+
+    # build a list of targets that each source file is part of
     for t in tgt_list:
         if not targets[t.sname] in [ 'LIBRARY', 'BINARY', 'PYTHON' ]:
             continue
-
-        sources = []
         for obj in t.add_objects:
-            t2 = t.bld.name_to_obj(obj, bld.env)
-            obj_sources = getattr(t2, 'source', '')
-            if obj_sources == '': continue
-            tpath = os_path_relpath(t2.path.abspath(bld.env), t.env['BUILD_DIRECTORY'] + '/default')
-            obj_sources = bld.SUBDIR(tpath, obj_sources)
-            sources.append( { 'dep':obj, 'src':set(TO_LIST(obj_sources)) } )
-            #debug('deps: dependency expansion for target %s add_object %s: %s',
-            #      t.sname, obj, obj_sources)
-            for s in sources:
-                for s2 in sources:
-                    if s['dep'] == s2['dep']: continue
-                    common = s['src'].intersection(s2['src'])
-                    if common:
-                        bld.ASSERT(False,
-                                   "Target %s has duplicate source files in %s and %s : %s" % (t.sname,
-                                                                                               s['dep'], s2['dep'],
-                                                                                               common))
-
-def check_orpaned_targets(bld, tgt_list):
-    '''check if any build targets are orphaned'''
-
-    target_dict = LOCAL_CACHE(bld, 'TARGET_TYPE')
-
-    debug('deps: checking for orphaned targets')
+            t2 = t.bld.get_tgen_by_name(obj)
+            source_set = getattr(t2, 'samba_source_set', set())
+            for s in source_set:
+                if not s in subsystems:
+                    subsystems[s] = {}
+                if not t.sname in subsystems[s]:
+                    subsystems[s][t.sname] = []
+                subsystems[s][t.sname].append(t2.sname)
+
+    for s in subsystems:
+        if len(subsystems[s]) > 1 and Options.options.SHOW_DUPLICATES:
+            Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))
+        for tname in subsystems[s]:
+            if len(subsystems[s][tname]) > 1:
+                raise Utils.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
+
+    return True
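
The duplicate-source bookkeeping above, sketched with plain dicts (file and target names are made up): each normalised source path is mapped to the binaries/libraries that pull it in, and within each of those to the subsystems that contributed it.

# Sketch only: not part of the patch.
import os

add_objects = {'smbd': ['lib_a', 'lib_b']}           # binary -> contributing objlists
source_set  = {'lib_a': {'source3/util.c'},
               'lib_b': {'source3/util.c'}}          # same file pulled in twice

seen = {}                                            # source -> {target: [subsystems]}
for tname, objs in add_objects.items():
    for obj in objs:
        for s in source_set[obj]:
            p = os.path.normpath(s)
            seen.setdefault(p, {}).setdefault(tname, []).append(obj)

for s, users in seen.items():
    for tname, subs in users.items():
        if len(subs) > 1:
            print("ERROR: source %s is in more than one subsystem of '%s': %s"
                  % (s, tname, subs))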
+
+def check_group_ordering(bld, tgt_list):
+    '''see if we have any dependencies that violate the group ordering
+
+    It is an error for a target to depend on a target from a later
+    build group
+    '''
+
+    def group_name(g):
+        tm = bld.task_manager
+        return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]
+
+    for g in bld.task_manager.groups:
+        gname = group_name(g)
+        for t in g.tasks_gen:
+            t.samba_group = gname
 
+    grp_map = {}
+    idx = 0
+    for g in bld.task_manager.groups:
+        name = group_name(g)
+        grp_map[name] = idx
+        idx += 1
+
+    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+    ret = True
     for t in tgt_list:
-        if getattr(t, 'samba_used', False) == True:
-            continue
-        type = target_dict[t.sname]
-        if not type in ['BINARY', 'LIBRARY', 'MODULE', 'ET', 'PYTHON']:
-            if re.search('^PIDL_', t.sname) is None:
-                print "Target %s of type %s is unused by any other target" % (t.sname, type)
+        tdeps = getattr(t, 'add_objects', []) + getattr(t, 'uselib_local', [])
+        for d in tdeps:
+            t2 = bld.get_tgen_by_name(d)
+            if t2 is None:
+                continue
+            map1 = grp_map[t.samba_group]
+            map2 = grp_map[t2.samba_group]
 
+            if map2 > map1:
+                Logs.error("Target %r in build group %r depends on target %r from later build group %r" % (
+                           t.sname, t.samba_group, t2.sname, t2.samba_group))
+                ret = False
+
+    return ret
+Build.BuildContext.check_group_ordering = check_group_ordering
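
The group-ordering rule above, as a minimal sketch: build groups get increasing indices, and any dependency pointing at a later group is reported. Group and target names here are invented:

# Sketch only: not part of the patch.
groups = ['base_libraries', 'main', 'binaries']
grp_map = {name: idx for idx, name in enumerate(groups)}

targets = {
    'libsmb':     {'group': 'main',     'deps': []},
    'smbd':       {'group': 'main',     'deps': ['libsmb', 'smbtorture']},  # bad dep
    'smbtorture': {'group': 'binaries', 'deps': []},
}

ok = True
for name, t in targets.items():
    for d in t['deps']:
        if grp_map[targets[d]['group']] > grp_map[t['group']]:
            print("Target %r in build group %r depends on target %r from later build group %r"
                  % (name, t['group'], d, targets[d]['group']))
            ok = False
print(ok)    # False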
 
 def show_final_deps(bld, tgt_list):
     '''show the final dependencies for all targets'''
@@ -263,10 +352,10 @@ def show_final_deps(bld, tgt_list):
     targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
 
     for t in tgt_list:
-        if not targets[t.sname] in ['LIBRARY', 'BINARY', 'PYTHON']:
+        if not targets[t.sname] in ['LIBRARY', 'BINARY', 'PYTHON', 'SUBSYSTEM']:
             continue
         debug('deps: final dependencies for target %s: uselib=%s uselib_local=%s add_objects=%s',
-              t.sname, t.uselib, t.uselib_local, t.add_objects)
+              t.sname, t.uselib, getattr(t, 'uselib_local', []), getattr(t, 'add_objects', []))
 
 
 def add_samba_attributes(bld, tgt_list):
@@ -285,31 +374,91 @@ def add_samba_attributes(bld, tgt_list):
         t.samba_includes_extended = TO_LIST(t.samba_includes)[:]
         t.ccflags = getattr(t, 'samba_cflags', '')
 
+def replace_grouping_libraries(bld, tgt_list):
+    '''replace dependencies based on grouping libraries
+
+    If a library is marked as a grouping library, then any target that
+    depends on a subsystem that is part of that grouping library gets
+    that dependency replaced with a dependency on the grouping library
+    '''
+
+    targets  = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+    grouping = {}
+
+    # find our list of grouping libraries, mapped from the subsystems they depend on
+    for t in tgt_list:
+        if not getattr(t, 'grouping_library', False):
+            continue
+        for dep in t.samba_deps_extended:
+            bld.ASSERT(dep in targets, "grouping library target %s not declared in %s" % (dep, t.sname))
+            if targets[dep] == 'SUBSYSTEM':
+                grouping[dep] = t.sname
+
+    # now replace any dependencies on elements of grouping libraries
+    for t in tgt_list:
+        for i in range(len(t.samba_deps_extended)):
+            dep = t.samba_deps_extended[i]
+            if dep in grouping:
+                if t.sname != grouping[dep]:
+                    debug("deps: target %s: replacing dependency %s with grouping library %s" % (t.sname, dep, grouping[dep]))
+                    t.samba_deps_extended[i] = grouping[dep]
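
The grouping-library replacement above, sketched with plain dicts (the names are illustrative): dependencies on subsystems that live inside a grouping library are rewritten to point at the library itself.

# Sketch only: not part of the patch.
target_type = {'util-core': 'SUBSYSTEM', 'samba-util': 'LIBRARY', 'smbd': 'BINARY'}
deps = {
    'samba-util': ['util-core'],        # the grouping library and its contents
    'smbd':       ['util-core'],        # depends on a grouped subsystem
    'util-core':  [],
}
is_grouping_library = {'samba-util'}

grouping = {}
for lib in is_grouping_library:
    for dep in deps[lib]:
        if target_type[dep] == 'SUBSYSTEM':
            grouping[dep] = lib

for tname, dlist in deps.items():
    for i, d in enumerate(dlist):
        if d in grouping and tname != grouping[d]:
            dlist[i] = grouping[d]

print(deps['smbd'])                     # ['samba-util']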
+
+
+
 def build_direct_deps(bld, tgt_list):
     '''build the direct_objects and direct_libs sets for each target'''
 
     targets  = LOCAL_CACHE(bld, 'TARGET_TYPE')
+    syslib_deps  = LOCAL_CACHE(bld, 'SYSLIB_DEPS')
+
     global_deps = bld.env.GLOBAL_DEPENDENCIES
+    global_deps_exclude = set()
+    for dep in global_deps:
+        t = bld.get_tgen_by_name(dep)
+        for d in t.samba_deps:
+            # prevent loops from the global dependencies list
+            global_deps_exclude.add(d)
+            global_deps_exclude.add(d + '.objlist')
 
     for t in tgt_list:
         t.direct_objects = set()
         t.direct_libs = set()
         t.direct_syslibs = set()
-        deps = t.samba_deps_extended
-        deps.extend(global_deps)
+        deps = t.samba_deps_extended[:]
+        if getattr(t, 'samba_use_global_deps', False) and not t.sname in global_deps_exclude:
+            deps.extend(global_deps)
         for d in deps:
-            d = EXPAND_ALIAS(bld, d)
+            if d == t.sname: continue
             if not d in targets:
-                print "Unknown dependency %s in %s" % (d, t.sname)
-                raise
+                Logs.error("Unknown dependency '%s' in '%s'" % (d, t.sname))
+                sys.exit(1)
             if targets[d] in [ 'EMPTY', 'DISABLED' ]:
                 continue
+            if targets[d] == 'PYTHON' and targets[t.sname] != 'PYTHON' and t.sname.find('.objlist') == -1:
+                # this check should be more restrictive, but for now we have pidl-generated python
+                # code that directly depends on other python modules
+                Logs.error('ERROR: Target %s has dependency on python module %s' % (t.sname, d))
+                sys.exit(1)
             if targets[d] == 'SYSLIB':
                 t.direct_syslibs.add(d)
+                if d in syslib_deps:
+                    for implied in TO_LIST(syslib_deps[d]):
+                        if BUILTIN_LIBRARY(bld, implied):
+                            t.direct_objects.add(implied)
+                        elif targets[implied] == 'SYSLIB':
+                            t.direct_syslibs.add(implied)
+                        elif targets[implied] in ['LIBRARY', 'MODULE']:
+                            t.direct_libs.add(implied)
+                        else:
+                            Logs.error('Implied dependency %s in %s is of type %s' % (
+                                implied, t.sname, targets[implied]))
+                            sys.exit(1)
                 continue
-            t2 = bld.name_to_obj(d, bld.env)
+            t2 = bld.get_tgen_by_name(d)
             if t2 is None:
-                print "no task %s type %s" % (d, targets[d])
+                Logs.error("no task %s of type %s in %s" % (d, targets[d], t.sname))
+                sys.exit(1)
             if t2.samba_type in [ 'LIBRARY', 'MODULE' ]:
                 t.direct_libs.add(d)
             elif t2.samba_type in [ 'SUBSYSTEM', 'ASN1', 'PYTHON' ]:
@@ -317,8 +466,17 @@ def build_direct_deps(bld, tgt_list):
     debug('deps: built direct dependencies')
 
 
+def dependency_loop(loops, t, target):
+    '''add a dependency loop to the loops dictionary'''
+    if t.sname == target:
+        return
+    if not target in loops:
+        loops[target] = set()
+    if not t.sname in loops[target]:
+        loops[target].add(t.sname)
+
 
-def indirect_libs(bld, t, chain):
+def indirect_libs(bld, t, chain, loops):
     '''recursively calculate the indirect library dependencies for a target
 
     An indirect library is a library that results from a dependency on
@@ -332,20 +490,22 @@ def indirect_libs(bld, t, chain):
     ret = set()
     for obj in t.direct_objects:
         if obj in chain:
+            dependency_loop(loops, t, obj)
             continue
         chain.add(obj)
-        t2 = bld.name_to_obj(obj, bld.env)
-        r2 = indirect_libs(bld, t2, chain)
+        t2 = bld.get_tgen_by_name(obj)
+        r2 = indirect_libs(bld, t2, chain, loops)
         chain.remove(obj)
         ret = ret.union(t2.direct_libs)
         ret = ret.union(r2)
 
-    for obj in t.indirect_objects:
+    for obj in indirect_objects(bld, t, set(), loops):
         if obj in chain:
+            dependency_loop(loops, t, obj)
             continue
         chain.add(obj)
-        t2 = bld.name_to_obj(obj, bld.env)
-        r2 = indirect_libs(bld, t2, chain)
+        t2 = bld.get_tgen_by_name(obj)
+        r2 = indirect_libs(bld, t2, chain, loops)
         chain.remove(obj)
         ret = ret.union(t2.direct_libs)
         ret = ret.union(r2)
@@ -355,31 +515,7 @@ def indirect_libs(bld, t, chain):
     return ret
 
 
-def indirect_syslibs(bld, t, chain):
-    '''recursively calculate the indirect system library dependencies for a target
-
-    An indirect syslib results from a subsystem dependency
-    '''
-
-    ret = getattr(t, 'indirect_syslibs', None)
-    if ret is not None:
-        return ret
-    ret = set()
-    for obj in t.direct_objects:
-        if obj in chain:
-            continue
-        chain.add(obj)
-        t2 = bld.name_to_obj(obj, bld.env)
-        r2 = indirect_syslibs(bld, t2, chain)
-        chain.remove(obj)
-        ret = ret.union(t2.direct_syslibs)
-        ret = ret.union(r2)
-
-    t.indirect_syslibs = ret
-    return ret
-
-
-def indirect_objects(bld, t, chain):
+def indirect_objects(bld, t, chain, loops):
     '''recursively calculate the indirect object dependencies for a target
 
     indirect objects are the set of objects from expanding the
@@ -392,10 +528,11 @@ def indirect_objects(bld, t, chain):
     ret = set()
     for lib in t.direct_objects:
         if lib in chain:
+            dependency_loop(loops, t, lib)
             continue
         chain.add(lib)
-        t2 = bld.name_to_obj(lib, bld.env)
-        r2 = indirect_objects(bld, t2, chain)
+        t2 = bld.get_tgen_by_name(lib)
+        r2 = indirect_objects(bld, t2, chain, loops)
         chain.remove(lib)
         ret = ret.union(t2.direct_objects)
         ret = ret.union(r2)
@@ -404,71 +541,36 @@ def indirect_objects(bld, t, chain):
     return ret
 
 
-def expanded_targets(bld, t, chain):
-    '''recursively calculate the expanded targets for a target
+def extended_objects(bld, t, chain):
+    '''recursively calculate the extended object dependencies for a target
 
-    expanded objects are the set of objects, libraries and syslibs
-    from expanding the subsystem dependencies, library dependencies
-    and syslib dependencies
+    extended objects are the union of:
+       - direct objects
+       - indirect objects
+       - direct and indirect objects of all direct and indirect libraries
     '''
 
-    ret = getattr(t, 'expanded_targets', None)
+    ret = getattr(t, 'extended_objects', None)
     if ret is not None: return ret
 
-    ret = t.direct_objects.copy()
-    ret = ret.union(t.direct_libs)
-    ret = ret.union(t.direct_syslibs)
-
-    direct = ret.copy()
+    ret = set()
+    ret = ret.union(t.final_objects)
 
-    for d in direct:
-        if d in chain: continue
-        chain.add(d)
-        t2 = bld.name_to_obj(d, bld.env)
-        if t2 is None: continue
-        r2 = expanded_targets(bld, t2, chain)
-        chain.remove(d)
+    for lib in t.final_libs:
+        if lib in chain:
+            continue
+        t2 = bld.get_tgen_by_name(lib)
+        chain.add(lib)
+        r2 = extended_objects(bld, t2, chain)
+        chain.remove(lib)
+        ret = ret.union(t2.final_objects)
         ret = ret.union(r2)
 
-    if t.sname in ret:
-        ret.remove(t.sname)
-
-    t.expanded_targets = ret
-    return ret
-
-
-def expanded_targets2(bld, t, chain):
-    '''recursively calculate the expanded targets for a target
-
-    expanded objects are the set of objects from expanding the
-    subsystem dependencies and library dependencies
-    '''
-
-    ret = getattr(t, 'expanded_targets2', None)
-    if ret is not None: return ret
-
-    ret = t.final_objects.copy()
-
-    for attr in [ 'final_objects', 'final_libs' ]:
-        f = getattr(t, attr, set())
-        for d in f.copy():
-            if d in chain:
-                continue
-            chain.add(d)
-            t2 = bld.name_to_obj(d, bld.env)
-            if t2 is None: continue
-            r2 = expanded_targets2(bld, t2, chain)
-            chain.remove(d)
-            ret = ret.union(r2)
-
-    if t.sname in ret:
-        ret.remove(t.sname)
-
-    t.expanded_targets2 = ret
+    t.extended_objects = ret
     return ret
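
A condensed sketch of that recursion (without the caching of t.extended_objects, and with invented names): collect every object reachable through final_libs, using a chain set to guard against dependency loops.

# Sketch only: not part of the patch.
final_objects = {'libA': {'a.o'}, 'libB': {'b.o'}, 'smbd': {'main.o'}}
final_libs    = {'libA': {'libB'}, 'libB': set(), 'smbd': {'libA'}}

def extended(name, chain=frozenset()):
    ret = set(final_objects[name])
    for lib in final_libs[name]:
        if lib in chain:
            continue                    # loop: skip, handled by the loop-breaking code
        ret |= extended(lib, chain | {lib})
    return ret

print(sorted(extended('smbd')))         # ['a.o', 'b.o', 'main.o']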
 
 
-def includes_objects(bld, t, chain):
+def includes_objects(bld, t, chain, inc_loops):
     '''recursively calculate the includes object dependencies for a target
 
     includes dependencies come from either library or object dependencies
@@ -482,20 +584,27 @@ def includes_objects(bld, t, chain):
 
     for obj in t.direct_objects:
         if obj in chain:
+            dependency_loop(inc_loops, t, obj)
             continue
         chain.add(obj)
-        t2 = bld.name_to_obj(obj, bld.env)
-        r2 = includes_objects(bld, t2, chain)
+        t2 = bld.get_tgen_by_name(obj)
+        r2 = includes_objects(bld, t2, chain, inc_loops)
         chain.remove(obj)
         ret = ret.union(t2.direct_objects)
         ret = ret.union(r2)
 
     for lib in t.direct_libs:
         if lib in chain:
+            dependency_loop(inc_loops, t, lib)
             continue
         chain.add(lib)
-        t2 = bld.name_to_obj(lib, bld.env)
-        r2 = includes_objects(bld, t2, chain)
+        t2 = bld.get_tgen_by_name(lib)
+        if t2 is None:
+            targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+            Logs.error('Target %s of type %s not found in direct_libs for %s' % (
+                lib, targets[lib], t.sname))
+            sys.exit(1)
+        r2 = includes_objects(bld, t2, chain, inc_loops)
         chain.remove(lib)
         ret = ret.union(t2.direct_objects)
         ret = ret.union(r2)
@@ -504,35 +613,153 @@ def includes_objects(bld, t, chain):
     return ret
 
 
-def build_indirect_deps(bld, tgt_list):
-    '''build the indirect_objects and indirect_libs sets for each target'''
+def break_dependency_loops(bld, tgt_list):
+    '''find and break dependency loops'''
+    loops = {}
+    inc_loops = {}
+
+    # build up the list of loops
     for t in tgt_list:
-        indirect_objects(bld, t, set())
-        indirect_libs(bld, t, set())
-        indirect_syslibs(bld, t, set())
-        includes_objects(bld, t, set())
-        expanded_targets(bld, t, set())
-    debug('deps: built indirect dependencies')
+        indirect_objects(bld, t, set(), loops)
+        indirect_libs(bld, t, set(), loops)
+        includes_objects(bld, t, set(), inc_loops)
 
+    # break the loops
+    for t in tgt_list:
+        if t.sname in loops:
+            for attr in ['direct_objects', 'indirect_objects', 'direct_libs', 'indirect_libs']:
+                objs = getattr(t, attr, set())
+                setattr(t, attr, objs.difference(loops[t.sname]))
+
+    for loop in loops:
+        debug('deps: Found dependency loops for target %s : %s', loop, loops[loop])
+
+    for loop in inc_loops:
+        debug('deps: Found include loops for target %s : %s', loop, inc_loops[loop])
+
+    # expand the loops mapping by one level
+    for loop in loops.copy():
+        for tgt in loops[loop]:
+            if tgt in loops:
+                loops[loop] = loops[loop].union(loops[tgt])
+
+    for loop in inc_loops.copy():
+        for tgt in inc_loops[loop]:
+            if tgt in inc_loops:
+                inc_loops[loop] = inc_loops[loop].union(inc_loops[tgt])
+
+
+    # expand indirect subsystem and library loops
+    for loop in loops.copy():
+        t = bld.get_tgen_by_name(loop)
+        if t.samba_type in ['SUBSYSTEM']:
+            loops[loop] = loops[loop].union(t.indirect_objects)
+            loops[loop] = loops[loop].union(t.direct_objects)
+        if t.samba_type in ['LIBRARY','PYTHON']:
+            loops[loop] = loops[loop].union(t.indirect_libs)
+            loops[loop] = loops[loop].union(t.direct_libs)
+        if loop in loops[loop]:
+            loops[loop].remove(loop)
+
+    # expand indirect includes loops
+    for loop in inc_loops.copy():
+        t = bld.get_tgen_by_name(loop)
+        inc_loops[loop] = inc_loops[loop].union(t.includes_objects)
+        if loop in inc_loops[loop]:
+            inc_loops[loop].remove(loop)
+
+    # add in the replacement dependencies
+    for t in tgt_list:
+        for loop in loops:
+            for attr in ['indirect_objects', 'indirect_libs']:
+                objs = getattr(t, attr, set())
+                if loop in objs:
+                    diff = loops[loop].difference(objs)
+                    if t.sname in diff:
+                        diff.remove(t.sname)
+                    if diff:
+                        debug('deps: Expanded target %s of type %s from loop %s by %s', t.sname, t.samba_type, loop, diff)
+                        objs = objs.union(diff)
+                setattr(t, attr, objs)
+
+        for loop in inc_loops:
+            objs = getattr(t, 'includes_objects', set())
+            if loop in objs:
+                diff = inc_loops[loop].difference(objs)
+                if t.sname in diff:
+                    diff.remove(t.sname)
+                if diff:
+                    debug('deps: Expanded target %s includes of type %s from loop %s by %s', t.sname, t.samba_type, loop, diff)
+                    objs = objs.union(diff)
+            setattr(t, 'includes_objects', objs)
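
The one-level loop expansion used above, isolated as a sketch: every recorded loop is widened by the loops of its members, so the replacement step later sees indirect members as well (library names are invented).

# Sketch only: not part of the patch.
loops = {'libA': {'libB'}, 'libB': {'libC'}, 'libC': set()}

for loop in list(loops):
    for tgt in list(loops[loop]):
        if tgt in loops:
            loops[loop] = loops[loop].union(loops[tgt])

print(loops)   # libA now also contains libC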
+
+
+def reduce_objects(bld, tgt_list):
+    '''reduce objects by looking for indirect object dependencies'''
+    rely_on = {}
 
-def re_expand2(bld, tgt_list):
     for t in tgt_list:
-        t.expanded_targets2 = None
-    for type in ['BINARY','LIBRARY','PYTHON']:
+        t.extended_objects = None
+
+    changed = False
+
+    for type in ['BINARY', 'PYTHON', 'LIBRARY']:
         for t in tgt_list:
-            if t.samba_type == type:
-                expanded_targets2(bld, t, set())
-    for t in tgt_list:
-        expanded_targets2(bld, t, set())
+            if t.samba_type != type: continue
+            # if we will indirectly link to a target then we don't need it
+            new = t.final_objects.copy()
+            for l in t.final_libs:
+                t2 = bld.get_tgen_by_name(l)
+                t2_obj = extended_objects(bld, t2, set())
+                dup = new.intersection(t2_obj)
+                if t.sname in rely_on:
+                    dup = dup.difference(rely_on[t.sname])
+                if dup:
+                    debug('deps: removing dups from %s of type %s: %s also in %s %s',
+                          t.sname, t.samba_type, dup, t2.samba_type, l)
+                    new = new.difference(dup)
+                    changed = True
+                    if not l in rely_on:
+                        rely_on[l] = set()
+                    rely_on[l] = rely_on[l].union(dup)
+            t.final_objects = new
+
+    if not changed:
+        return False
+
+    # add back in any objects that were relied upon by the reduction rules
+    for r in rely_on:
+        t = bld.get_tgen_by_name(r)
+        t.final_objects = t.final_objects.union(rely_on[r])
+
+    return True
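
A simplified sketch of the reduction above (without the rely_on bookkeeping): objects already reachable through a linked library are dropped, and the pass repeats until nothing changes.

# Sketch only: not part of the patch.
final_objects = {'smbd': {'a.o', 'b.o', 'c.o'}, 'libsmb': {'b.o', 'c.o'}}
final_libs    = {'smbd': {'libsmb'}, 'libsmb': set()}

def reduce_once():
    changed = False
    for t in final_objects:
        for lib in final_libs[t]:
            dup = final_objects[t] & final_objects[lib]
            if dup:
                final_objects[t] = final_objects[t] - dup
                changed = True
    return changed

count = 0
while reduce_once():
    count += 1
print(final_objects['smbd'], count)     # {'a.o'} 1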
+
+
+def show_library_loop(bld, lib1, lib2, path, seen):
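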
+    '''show the detailed path of a library loop between lib1 and lib2'''
+
+    t = bld.get_tgen_by_name(lib1)
+    if not lib2 in getattr(t, 'final_libs', set()):
+        return
+
+    for d in t.samba_deps_extended:
+        if d in seen:
+            continue
+        seen.add(d)
+        path2 = path + '=>' + d
+        if d == lib2:
+            Logs.warn('library loop path: ' + path2)
+            return
+        show_library_loop(bld, d, lib2, path2, seen)
+        seen.remove(d)
 
 
-def calculate_final_deps(bld, tgt_list):
+def calculate_final_deps(bld, tgt_list, loops):
     '''calculate the final library and object dependencies'''
     for t in tgt_list:
         # start with the maximum possible list
-        t.final_syslibs = t.direct_syslibs.union(t.indirect_syslibs)
-        t.final_libs    = t.direct_libs.union(t.indirect_libs)
-        t.final_objects = t.direct_objects.union(t.indirect_objects)
+        t.final_libs    = t.direct_libs.union(indirect_libs(bld, t, set(), loops))
+        t.final_objects = t.direct_objects.union(indirect_objects(bld, t, set(), loops))
 
     for t in tgt_list:
         # don't depend on ourselves
@@ -541,66 +768,194 @@ def calculate_final_deps(bld, tgt_list):
         if t.sname in t.final_objects:
             t.final_objects.remove(t.sname)
 
-    re_expand2(bld, tgt_list)
-
-    loops = {}
+    # handle any non-shared binaries
+    for t in tgt_list:
+        if t.samba_type == 'BINARY' and bld.NONSHARED_BINARY(t.sname):
+            subsystem_list = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
+            targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+            # replace lib deps with objlist deps
+            for l in t.final_libs:
+                objname = l + '.objlist'
+                t2 = bld.get_tgen_by_name(objname)
+                if t2 is None:
+                    Logs.error('ERROR: subsystem %s not found' % objname)
+                    sys.exit(1)
+                t.final_objects.add(objname)
+                t.final_objects = t.final_objects.union(extended_objects(bld, t2, set()))
+                if l in subsystem_list:
+                    # it's a subsystem - we also need the contents of any modules
+                    for d in subsystem_list[l]:
+                        module_name = d['TARGET']
+                        if targets[module_name] == 'LIBRARY':
+                            objname = module_name + '.objlist'
+                        elif targets[module_name] == 'SUBSYSTEM':
+                            objname = module_name
+                        else:
+                            continue
+                        t2 = bld.get_tgen_by_name(objname)
+                        if t2 is None:
+                            Logs.error('ERROR: subsystem %s not found' % objname)
+                            sys.exit(1)
+                        t.final_objects.add(objname)
+                        t.final_objects = t.final_objects.union(extended_objects(bld, t2, set()))
+            t.final_libs = set()
 
     # find any library loops
     for t in tgt_list:
         if t.samba_type in ['LIBRARY', 'PYTHON']:
             for l in t.final_libs.copy():
-                t2 = bld.name_to_obj(l, bld.env)
+                t2 = bld.get_tgen_by_name(l)
                 if t.sname in t2.final_libs:
-                    # we could break this in either direction. If one of the libraries
-                    # has a version number, and will this be distributed publicly, then
-                    # we should make it the lower level library in the DAG
-                    debug('deps: removing library loop %s<->%s', t.sname, l)
-                    t2.final_libs.remove(t.sname)
-                    loops[t2.sname] = t.sname;
-
-    re_expand2(bld, tgt_list)
-
-    for type in ['BINARY']:
-        while True:
-            changed = False
-            for t in tgt_list:
-                if t.samba_type != type: continue
-                # if we will indirectly link to a target then we don't need it
-                new = t.final_objects.copy()
-                for l in t.final_libs:
-                    t2 = bld.name_to_obj(l, bld.env)
-                    dup = new.intersection(t2.expanded_targets2)
-                    if dup:
-                        debug('deps: removing dups from %s: %s also in %s %s',
-                              t.sname, dup, t2.samba_type, l)
-                        new = new.difference(dup)
-                        changed = True
-                if changed:
-                    t.final_objects = new
-                    break
-            if not changed:
-                break
+                    if getattr(bld.env, "ALLOW_CIRCULAR_LIB_DEPENDENCIES", False):
+                        # we could break this in either direction. If one of the libraries
+                        # has a version number and will be distributed publicly, then
+                        # we should make it the lower level library in the DAG
+                        Logs.warn('deps: removing library loop %s from %s' % (t.sname, t2.sname))
+                        dependency_loop(loops, t, t2.sname)
+                        t2.final_libs.remove(t.sname)
+                    else:
+                        Logs.error('ERROR: circular library dependency between %s and %s'
+                            % (t.sname, t2.sname))
+                        show_library_loop(bld, t.sname, t2.sname, t.sname, set())
+                        show_library_loop(bld, t2.sname, t.sname, t2.sname, set())
+                        sys.exit(1)
+
+    for loop in loops:
+        debug('deps: Found dependency loops for target %s : %s', loop, loops[loop])
 
     # we now need to make corrections for any library loops we broke up
     # any target that depended on the target of the loop and doesn't
     # depend on the source of the loop needs to get the loop source added
-    for type in ['BINARY','PYTHON']:
+    for type in ['BINARY','PYTHON','LIBRARY']:
         for t in tgt_list:
             if t.samba_type != type: continue
             for loop in loops:
-                if loop in t.final_libs and loops[loop] not in t.final_libs:
-                    t.final_libs.add(loops[loop])
+                if loop in t.final_libs:
+                    diff = loops[loop].difference(t.final_libs)
+                    if t.sname in diff:
+                        diff.remove(t.sname)
+                    # make sure we don't recreate the loop again!
+                    for d in diff.copy():
+                        t2 = bld.get_tgen_by_name(d)
+                        if t2.samba_type == 'LIBRARY':
+                            if t.sname in t2.final_libs:
+                                debug('deps: removing expansion %s from %s', d, t.sname)
+                                diff.remove(d)
+                    if diff:
+                        debug('deps: Expanded target %s by loop %s libraries (loop %s) %s', t.sname, loop,
+                              loops[loop], diff)
+                        t.final_libs = t.final_libs.union(diff)
+
+    # remove objects that are also available in linked libs
+    count = 0
+    while reduce_objects(bld, tgt_list):
+        count += 1
+        if count > 100:
+            Logs.warn("WARNING: Unable to remove all inter-target object duplicates")
+            break
+    debug('deps: Object reduction took %u iterations', count)
+
+    # add in any syslib dependencies
+    for t in tgt_list:
+        if not t.samba_type in ['BINARY','PYTHON','LIBRARY','SUBSYSTEM']:
+            continue
+        syslibs = set()
+        for d in t.final_objects:
+            t2 = bld.get_tgen_by_name(d)
+            syslibs = syslibs.union(t2.direct_syslibs)
+        # this adds the indirect syslibs as well, which may not be needed
+        # depending on the linker flags
+        for d in t.final_libs:
+            t2 = bld.get_tgen_by_name(d)
+            syslibs = syslibs.union(t2.direct_syslibs)
+        t.final_syslibs = syslibs
+
+
+    # find any unresolved library loops
+    lib_loop_error = False
+    for t in tgt_list:
+        if t.samba_type in ['LIBRARY', 'PYTHON']:
+            for l in t.final_libs.copy():
+                t2 = bld.get_tgen_by_name(l)
+                if t.sname in t2.final_libs:
+                    Logs.error('ERROR: Unresolved library loop %s from %s' % (t.sname, t2.sname))
+                    lib_loop_error = True
+    if lib_loop_error:
+        sys.exit(1)
 
     debug('deps: removed duplicate dependencies')
 
 
+def show_dependencies(bld, target, seen):
+    '''recursively show the dependencies of target'''
+
+    if target in seen:
+        return
+
+    t = bld.get_tgen_by_name(target)
+    if t is None:
+        Logs.error("ERROR: Unable to find target '%s'" % target)
+        sys.exit(1)
+
+    Logs.info('%s(OBJECTS): %s' % (target, t.direct_objects))
+    Logs.info('%s(LIBS): %s' % (target, t.direct_libs))
+    Logs.info('%s(SYSLIBS): %s' % (target, t.direct_syslibs))
+
+    seen.add(target)
+
+    for t2 in t.direct_objects:
+        show_dependencies(bld, t2, seen)
+
+
+def show_object_duplicates(bld, tgt_list):
+    '''show a list of object files that are included in more than
+    one library or binary'''
+
+    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+    used_by = {}
+
+    Logs.info("showing duplicate objects")
+
+    for t in tgt_list:
+        if not targets[t.sname] in [ 'LIBRARY', 'PYTHON' ]:
+            continue
+        for n in getattr(t, 'final_objects', set()):
+            t2 = bld.get_tgen_by_name(n)
+            if not n in used_by:
+                used_by[n] = set()
+            used_by[n].add(t.sname)
+
+    for n in used_by:
+        if len(used_by[n]) > 1:
+            Logs.info("target '%s' is used by %s" % (n, used_by[n]))
+
+    Logs.info("showing indirect dependency counts (sorted by count)")
+
+    def indirect_count(t1, t2):
+        return len(t2.indirect_objects) - len(t1.indirect_objects)
+
+    sorted_list = sorted(tgt_list, cmp=indirect_count)
+    for t in sorted_list:
+        if len(t.indirect_objects) > 1:
+            Logs.info("%s depends on %u indirect objects" % (t.sname, len(t.indirect_objects)))
+
+
 ######################################################################
 # this provides a way to save our dependency calculations between runs
-savedeps_version = 2
-savedeps_inputs  = ['samba_deps', 'samba_includes', 'local_include', 'local_include_first', 'samba_cflags']
-savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes', 'ccflags']
+savedeps_version = 3
+savedeps_inputs  = ['samba_deps', 'samba_includes', 'local_include', 'local_include_first', 'samba_cflags',
+                    'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols',
+                    'use_global_deps', 'global_include' ]
+savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes',
+                    'ccflags', 'ldflags', 'samba_deps_extended', 'final_libs']
 savedeps_outenv  = ['INC_PATHS']
-savedeps_caches  = ['GLOBAL_DEPENDENCIES', 'TARGET_ALIAS', 'TARGET_TYPE', 'INIT_FUNCTIONS']
+savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ]
+savedeps_caches  = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS']
+savedeps_files   = ['buildtools/wafsamba/samba_deps.py']
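
A sketch of the invalidation rules that save_samba_deps()/load_samba_deps() below implement: the cached state is only reused when this file's mtime, the tracked environment variables and the local caches all still match. The 'saved' structure mirrors the denv fields; the helper name and argument shapes are illustrative.

# Sketch only: not part of the patch.
import os

def deps_cache_usable(saved, env, srcdir, files, envvars):
    for f in files:
        path = os.path.join(srcdir, f)
        if saved['files'].get(f) != os.stat(path).st_mtime:
            return False
    for e in envvars:
        if saved['envvar'].get(e) != env.get(e):
            return False
    return True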
 
 def save_samba_deps(bld, tgt_list):
     '''save the dependency calculations between builds, to make
@@ -614,10 +969,18 @@ def save_samba_deps(bld, tgt_list):
     denv.output = {}
     denv.outenv = {}
     denv.caches = {}
+    denv.envvar = {}
+    denv.files  = {}
+
+    for f in savedeps_files:
+        denv.files[f] = os.stat(os.path.join(bld.srcnode.abspath(), f)).st_mtime
 
     for c in savedeps_caches:
         denv.caches[c] = LOCAL_CACHE(bld, c)
 
+    for e in savedeps_envvars:
+        denv.envvar[e] = bld.env[e]
+
     for t in tgt_list:
         # save all the input attributes for each target
         tdeps = {}
@@ -645,7 +1008,8 @@ def save_samba_deps(bld, tgt_list):
             denv.outenv[t.sname] = tdeps
 
     depsfile = os.path.join(bld.bdir, "sambadeps")
-    denv.store(depsfile)
+    denv.store_fast(depsfile)
+
 
 
 def load_samba_deps(bld, tgt_list):
@@ -654,19 +1018,31 @@ def load_samba_deps(bld, tgt_list):
     denv = Environment.Environment()
     try:
         debug('deps: checking saved dependencies')
-        denv.load(depsfile)
+        denv.load_fast(depsfile)
         if (denv.version != savedeps_version or
             denv.savedeps_inputs != savedeps_inputs or
             denv.savedeps_outputs != savedeps_outputs):
             return False
-    except:
+    except Exception:
         return False
 
+    # check if critical files have changed
+    for f in savedeps_files:
+        if f not in denv.files:
+            return False
+        if denv.files[f] != os.stat(os.path.join(bld.srcnode.abspath(), f)).st_mtime:
+            return False
+
     # check if caches are the same
     for c in savedeps_caches:
         if c not in denv.caches or denv.caches[c] != LOCAL_CACHE(bld, c):
             return False
 
+    # check if the saved environment variables are the same
+    for e in savedeps_envvars:
+        if e not in denv.envvar or denv.envvar[e] != bld.env[e]:
+            return False
+
     # check inputs are the same
     for t in tgt_list:
         tdeps = {}
@@ -700,50 +1076,90 @@ def load_samba_deps(bld, tgt_list):
     return True
 
 
+
 def check_project_rules(bld):
     '''check the project rules - ensuring the targets are sane'''
 
-    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+    loops = {}
+    inc_loops = {}
 
-    # build a list of task generators we are interested in
-    tgt_list = []
-    for tgt in targets:
-        type = targets[tgt]
-        if not type in ['SUBSYSTEM', 'MODULE', 'BINARY', 'LIBRARY', 'ASN1', 'PYTHON']:
-            continue
-        t = bld.name_to_obj(tgt, bld.env)
-        if t is None:
-            print "Target %s of type %s has no task generator" % (tgt, type)
-            raise
-        tgt_list.append(t)
+    tgt_list = get_tgt_list(bld)
 
     add_samba_attributes(bld, tgt_list)
 
-    if load_samba_deps(bld, tgt_list):
+    force_project_rules = (Options.options.SHOWDEPS or
+                           Options.options.SHOW_DUPLICATES)
+
+    if not force_project_rules and load_samba_deps(bld, tgt_list):
         return
 
+    global tstart
+    tstart = time.clock()
+
+    bld.new_rules = True
+    Logs.info("Checking project rules ...")
+
     debug('deps: project rules checking started')
 
     expand_subsystem_deps(bld)
+
+    debug("deps: expand_subsystem_deps: %f" % (time.clock() - tstart))
+
+    replace_grouping_libraries(bld, tgt_list)
+
+    debug("deps: replace_grouping_libraries: %f" % (time.clock() - tstart))
+
     build_direct_deps(bld, tgt_list)
-    build_indirect_deps(bld, tgt_list)
-    calculate_final_deps(bld, tgt_list)
+
+    debug("deps: build_direct_deps: %f" % (time.clock() - tstart))
+
+    break_dependency_loops(bld, tgt_list)
+
+    debug("deps: break_dependency_loops: %f" % (time.clock() - tstart))
+
+    if Options.options.SHOWDEPS:
+        show_dependencies(bld, Options.options.SHOWDEPS, set())
+
+    calculate_final_deps(bld, tgt_list, loops)
+
+    debug("deps: calculate_final_deps: %f" % (time.clock() - tstart))
+
+    if Options.options.SHOW_DUPLICATES:
+        show_object_duplicates(bld, tgt_list)
 
     # run the various attribute generators
     for f in [ build_dependencies, build_includes, add_init_functions ]:
         debug('deps: project rules checking %s', f)
         for t in tgt_list: f(t)
+        debug("deps: %s: %f" % (f, time.clock() - tstart))
 
     debug('deps: project rules stage1 completed')
 
-    #check_orpaned_targets(bld, tgt_list)
-    #check_duplicate_sources(bld, tgt_list)
+    if not check_duplicate_sources(bld, tgt_list):
+        Logs.error("Duplicate sources present - aborting")
+        sys.exit(1)
+
+    debug("deps: check_duplicate_sources: %f" % (time.clock() - tstart))
+
+    if not bld.check_group_ordering(tgt_list):
+        Logs.error("Bad group ordering - aborting")
+        sys.exit(1)
+
+    debug("deps: check_group_ordering: %f" % (time.clock() - tstart))
+
     show_final_deps(bld, tgt_list)
 
+    debug("deps: show_final_deps: %f" % (time.clock() - tstart))
+
     debug('deps: project rules checking completed - %u targets checked',
           len(tgt_list))
 
-    save_samba_deps(bld, tgt_list)
+    if not bld.is_install:
+        save_samba_deps(bld, tgt_list)
+
+    debug("deps: save_samba_deps: %f" % (time.clock() - tstart))
+
+    Logs.info("Project rules pass")
 
 
 def CHECK_PROJECT_RULES(bld):