# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
-import Build, os, Logs, sys, Configure, Options, string, Task, Utils, optparse
+import Build, os, Options, Task, Utils, cc, TaskGen, fnmatch, re, shutil, Logs, Constants
from Configure import conf
from Logs import debug
-from TaskGen import extension
+from samba_utils import SUBST_VARS_RECURSIVE
# bring in the other samba modules
+from samba_optimisation import *
from samba_utils import *
-# should be enabled from the above?
from samba_autoconf import *
from samba_patterns import *
from samba_pidl import *
+from samba_errtable import *
from samba_asn1 import *
from samba_autoproto import *
+from samba_python import *
+from samba_deps import *
+from samba_bundled import *
+import samba_install
+import samba_conftests
+import samba_abi
+import tru64cc
+import irixcc
+import generic_cc
+import samba_dist
+import samba_wildcard
+
+O644 = 420
+
+# some systems have broken threading in python
+if os.environ.get('WAF_NOTHREADS') == '1':
+ import nothreads
LIB_PATH="shared"
+os.putenv('PYTHONUNBUFFERED', '1')
+
+
+if Constants.HEXVERSION < 0x105016:
+ Logs.error('''
+Please use the version of waf that comes with Samba, not
+a system installed version. See http://wiki.samba.org/index.php/Waf
+for details.
+
+Alternatively, please use ./autogen-waf.sh, and then
+run ./configure and make as usual. That will call the right version of waf.
+''')
+ sys.exit(1)
+
-#################################################################
-# create the samba build environment
@conf
def SAMBA_BUILD_ENV(conf):
- libpath="%s/%s" % (conf.blddir, LIB_PATH)
- conf.env['BUILD_DIRECTORY'] = conf.blddir
- if not os.path.exists(libpath):
- os.mkdir(libpath)
-
-##############################################
-# remove .. elements from a path list
-def NORMPATH(bld, ilist):
- return " ".join([os.path.normpath(p) for p in ilist.split(" ")])
-Build.BuildContext.NORMPATH = NORMPATH
-
-################################################################
-# add an init_function to the list for a subsystem
-def ADD_INIT_FUNCTION(bld, subsystem, init_function):
+ '''create the samba build environment'''
+ conf.env.BUILD_DIRECTORY = conf.blddir
+ mkdir_p(os.path.join(conf.blddir, LIB_PATH))
+ mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc'))
+ # this allows all of the bin/shared and bin/python targets
+ # to be expressed in terms of build directory paths
+ mkdir_p(os.path.join(conf.blddir, 'default'))
+ for p in ['python','shared']:
+ link_target = os.path.join(conf.blddir, 'default/' + p)
+ if not os.path.lexists(link_target):
+ os.symlink('../' + p, link_target)
+
+ # get perl to put the blib files in the build directory
+ blib_bld = os.path.join(conf.blddir, 'default/pidl/blib')
+ blib_src = os.path.join(conf.srcdir, 'pidl/blib')
+ mkdir_p(blib_bld + '/man1')
+ mkdir_p(blib_bld + '/man3')
+ if os.path.islink(blib_src):
+ os.unlink(blib_src)
+ elif os.path.exists(blib_src):
+ shutil.rmtree(blib_src)
+
+
+def ADD_INIT_FUNCTION(bld, subsystem, target, init_function):
+ '''add an init_function to the list for a subsystem'''
if init_function is None:
return
bld.ASSERT(subsystem is not None, "You must specify a subsystem for init_function '%s'" % init_function)
cache = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
if not subsystem in cache:
- cache[subsystem] = ''
- cache[subsystem] += '%s,' % init_function
+ cache[subsystem] = []
+ cache[subsystem].append( { 'TARGET':target, 'INIT_FUNCTION':init_function } )
Build.BuildContext.ADD_INIT_FUNCTION = ADD_INIT_FUNCTION
-################################################################
-# recursively build the dependency list for a target
-def FULL_DEPENDENCIES(bld, cache, target, chain, path):
- if not target in cache:
- return {}
- deps = cache[target].copy()
- for t in cache[target]:
- bld.ASSERT(t not in chain, "Circular dependency for %s: %s->%s" % (t, path, t));
- c2 = chain.copy()
- c2[t] = True
- dict_concat(deps, FULL_DEPENDENCIES(bld, cache, t, c2, "%s->%s" % (path, t)))
- return deps
-
-############################################################
-# check our build dependencies for circular dependencies
-def CHECK_TARGET_DEPENDENCY(bld, target):
- cache = LOCAL_CACHE(bld, 'LIB_DEPS')
- return FULL_DEPENDENCIES(bld, cache, target, { target:True }, target)
-
-############################################################
-# check that all dependencies have been declared
-def CHECK_DEPENDENCIES(bld):
- cache = LOCAL_CACHE(bld, 'LIB_DEPS')
- target_cache = LOCAL_CACHE(bld, 'TARGET_TYPE')
- debug('deps: Checking dependencies')
- for t in cache:
- deps = CHECK_TARGET_DEPENDENCY(bld, t)
- for d in deps:
- if not d in target_cache:
- print "WARNING: Dependency '%s' of target '%s' not declared" % (d, t)
- #ASSERT(bld, d in target_cache,
- # "Dependency '%s' of target '%s' not declared" % (d, t))
- debug("deps: Dependencies checked for %u targets" % len(target_cache))
-Build.BuildContext.CHECK_DEPENDENCIES = CHECK_DEPENDENCIES
-
-
-############################################################
-# pre-declare a target as being of a particular type
-def PREDECLARE(bld, target, type):
- cache = LOCAL_CACHE(bld, 'PREDECLARED_TARGET')
- target_cache = LOCAL_CACHE(bld, 'TARGET_TYPE')
- ASSERT(bld, not target in target_cache, "Target '%s' is already declared" % target)
- ASSERT(bld, not target in cache, "Target '%s' already predeclared" % target)
- cache[target] = type
-Build.BuildContext.PREDECLARE = PREDECLARE
-
-
-
-################################################################
-# add to the dependency list. Return a new dependency list with
-# any circular dependencies removed
-# returns a tuple containing (systemdeps, localdeps, add_objects)
-def ADD_DEPENDENCIES(bld, name, deps):
- debug('deps: Calculating dependencies for %s' % name)
- lib_deps = LOCAL_CACHE(bld, 'LIB_DEPS')
- if not name in lib_deps:
- lib_deps[name] = {}
- list = deps.split()
- list2 = []
- for d in list:
- lib_deps[name][d] = True;
- try:
- CHECK_TARGET_DEPENDENCY(bld, name)
- list2.append(d)
- except AssertionError:
- sys.stderr.write("Removing dependency %s from target %s\n" % (d, name))
- del(lib_deps[name][d])
-
- target_cache = LOCAL_CACHE(bld, 'TARGET_TYPE')
-
- # extract out the system dependencies
- sysdeps = []
- localdeps = []
- add_objects = []
- cache = LOCAL_CACHE(bld, 'EMPTY_TARGETS')
- predeclare = LOCAL_CACHE(bld, 'PREDECLARED_TARGET')
- for d in list2:
- recurse = False
- # strip out any dependencies on empty libraries
- if d in cache:
- debug("deps: Removing empty dependency '%s' from '%s'" % (d, name))
- continue
- type = None
-
- if d in target_cache:
- type = target_cache[d]
- elif d in predeclare:
- type = predeclare[d]
- else:
- type = 'SUBSYSTEM'
- LOCAL_CACHE_SET(bld, 'ASSUMED_TARGET', d, type)
-
- if type == 'SYSLIB':
- sysdeps.append(d)
- elif type == 'LIBRARY':
- localdeps.append(d)
- elif type == 'SUBSYSTEM':
- add_objects.append(d)
- recurse = True
- elif type == 'MODULE':
- add_objects.append(d)
- recurse = True
- elif type == 'PYTHON':
- pass
- elif type == 'ASN1':
- pass
- elif type == 'BINARY':
- pass
- else:
- ASSERT(bld, False, "Unknown target type '%s' for dependency %s" % (
- type, d))
-
- # for some types we have to build the list recursively
- if recurse and (d in lib_deps):
- rec_deps = ' '.join(lib_deps[d].keys())
- (rec_sysdeps, rec_localdeps, rec_add_objects) = ADD_DEPENDENCIES(bld, d, rec_deps)
- sysdeps.extend(rec_sysdeps.split())
- localdeps.extend(rec_localdeps.split())
- add_objects.extend(rec_add_objects.split())
-
- debug('deps: Dependencies for %s: sysdeps: %u localdeps: %u add_objects=%u' % (
- name, len(sysdeps), len(localdeps), len(add_objects)))
- return (' '.join(sysdeps), ' '.join(localdeps), ' '.join(add_objects))
#################################################################
-# return a include list for a set of library dependencies
-def SAMBA_LIBRARY_INCLUDE_LIST(bld, deps):
- ret = bld.curdir + ' '
- cache = LOCAL_CACHE(bld, 'INCLUDE_LIST')
- for l in deps.split():
- if l in cache:
- ret = ret + cache[l] + ' '
- if 'EXTRA_INCLUDES' in bld.env:
- ret += ' ' + ' '.join(bld.env['EXTRA_INCLUDES'])
- return ret
-Build.BuildContext.SAMBA_LIBRARY_INCLUDE_LIST = SAMBA_LIBRARY_INCLUDE_LIST
-
-#################################################################
-# define a Samba library
def SAMBA_LIBRARY(bld, libname, source,
deps='',
public_deps='',
- includes='.',
+ includes='',
public_headers=None,
+ header_path=None,
+ pc_files=None,
vnum=None,
cflags='',
- output_type=None,
+ external_library=False,
realname=None,
autoproto=None,
group='main',
- depends_on=''):
- if not SET_TARGET_TYPE(bld, libname, 'LIBRARY'):
+ depends_on='',
+ local_include=True,
+ vars=None,
+ install_path=None,
+ install=True,
+ needs_python=False,
+ target_type='LIBRARY',
+ bundled_extension=True,
+ link_name=None,
+ abi_file=None,
+ abi_match=None,
+ hide_symbols=False,
+ enabled=True):
+ '''define a Samba library'''
+
+ if not enabled:
+ SET_TARGET_TYPE(bld, libname, 'DISABLED')
return
+ source = bld.EXPAND_VARIABLES(source, vars=vars)
+
# remember empty libraries, so we can strip the dependencies
- if (source == '') or (source == []):
- LOCAL_CACHE_SET(bld, 'EMPTY_TARGETS', libname, True)
+ if ((source == '') or (source == [])) and deps == '' and public_deps == '':
+ SET_TARGET_TYPE(bld, libname, 'EMPTY')
return
- (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, libname, deps)
+ if target_type != 'PYTHON' and BUILTIN_LIBRARY(bld, libname):
+ obj_target = libname
+ else:
+ obj_target = libname + '.objlist'
+
+ # first create a target for building the object files for this library
+ # by separating in this way, we avoid recompiling the C files
+ # separately for the install library and the build library
+ bld.SAMBA_SUBSYSTEM(obj_target,
+ source = source,
+ deps = deps,
+ public_deps = public_deps,
+ includes = includes,
+ public_headers = public_headers,
+ header_path = header_path,
+ cflags = cflags,
+ group = group,
+ autoproto = autoproto,
+ depends_on = depends_on,
+ needs_python = needs_python,
+ hide_symbols = hide_symbols,
+ local_include = local_include)
+
+ if libname == obj_target:
+ return
- ilist = bld.SUBDIR(bld.curdir, includes) + ' ' + bld.SAMBA_LIBRARY_INCLUDE_LIST(deps)
- ilist = bld.NORMPATH(ilist)
+ if not SET_TARGET_TYPE(bld, libname, target_type):
+ return
- # this print below should show that we're runnig this code
- print "Setting build group for library %s to %s" % (libname, group), bld.path
- bld.SET_BUILD_GROUP(group) # <- here
- bld(
- features = 'cc cshlib',
- source = source,
- target=libname,
- uselib_local = localdeps,
- uselib = sysdeps,
- add_objects = add_objects,
- ccflags = CURRENT_CFLAGS(bld, cflags),
- includes=ilist + ' . #',
- depends_on=depends_on,
- vnum=vnum)
-
- # I have to set it each time? I expect it to be still
- # set from the few lines above
-
- # put a link to the library in bin/shared
- soext=""
- if vnum is not None:
- soext = '.' + vnum.split('.')[0]
+ # the library itself will depend on that object target
+ deps += ' ' + public_deps
+ deps = TO_LIST(deps)
+ deps.append(obj_target)
+ if target_type == 'PYTHON' or realname:
+ bundled_name = libname
+ else:
+ bundled_name = BUNDLED_NAME(bld, libname, bundled_extension)
+
+ features = 'cc cshlib symlink_lib install_lib'
+ if target_type == 'PYTHON':
+ features += ' pyext'
+ elif needs_python:
+ features += ' pyembed'
+ if abi_file:
+ features += ' abi_check'
+
+ if abi_file:
+ abi_file = os.path.join(bld.curdir, abi_file)
+
+ bld.SET_BUILD_GROUP(group)
t = bld(
- source = 'lib%s.so' % libname,
- rule = 'ln -sf ../${SRC}%s %s/lib%s.so%s' % (soext, LIB_PATH, libname, soext),
-# rule = 'ln -sf ../%s.so%s %s/lib%s.so%s' % (libname, soext, LIB_PATH, libname, soext),
- shell = True,
- after = 'cc_link',
- always = True,
- name = 'fff' + libname,
+ features = features,
+ source = [],
+ target = bundled_name,
+ samba_cflags = CURRENT_CFLAGS(bld, libname, cflags),
+ depends_on = depends_on,
+ samba_deps = deps,
+ samba_includes = includes,
+ local_include = local_include,
+ vnum = vnum,
+ install_path = None,
+ samba_inst_path = install_path,
+ name = libname,
+ samba_realname = realname,
+ samba_install = install,
+ abi_file = abi_file,
+ abi_match = abi_match
)
- #print t.rule
- LOCAL_CACHE_SET(bld, 'INCLUDE_LIST', libname, ilist)
+
+ if realname and not link_name:
+ link_name = 'shared/%s' % realname
+
+ if link_name:
+ t.link_name = link_name
+
+ if pc_files is not None:
+ bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)
Build.BuildContext.SAMBA_LIBRARY = SAMBA_LIBRARY
+
#################################################################
-# define a Samba binary
def SAMBA_BINARY(bld, binname, source,
deps='',
includes='',
public_headers=None,
+ header_path=None,
modules=None,
- installdir=None,
ldflags=None,
cflags='',
autoproto=None,
- use_hostcc=None,
+ use_hostcc=False,
+ use_global_deps=True,
compiler=None,
- group='main',
- manpages=None):
- ilist = includes + ' ' + bld.SAMBA_LIBRARY_INCLUDE_LIST(deps)
- ilist = bld.NORMPATH(ilist)
+ group='binaries',
+ manpages=None,
+ local_include=True,
+ subsystem_name=None,
+ needs_python=False,
+ vars=None,
+ install=True,
+ install_path=None,
+ enabled=True):
+ '''define a Samba binary'''
+
+ if not enabled:
+ SET_TARGET_TYPE(bld, binname, 'DISABLED')
+ return
if not SET_TARGET_TYPE(bld, binname, 'BINARY'):
return
- (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, binname, deps)
-
- cache = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
- if modules is not None:
- for m in modules.split():
- bld.ASSERT(m in cache,
- "No init_function defined for module '%s' in binary '%s'" % (m, binname))
- cflags += ' -DSTATIC_%s_MODULES="%s"' % (m, cache[m])
+ features = 'cc cprogram symlink_bin install_bin'
+ if needs_python:
+ features += ' pyembed'
+
+ obj_target = binname + '.objlist'
+
+ source = bld.EXPAND_VARIABLES(source, vars=vars)
+ source = unique_list(TO_LIST(source))
+
+ # first create a target for building the object files for this binary
+ # by separating in this way, we avoid recompiling the C files
+ # separately for the install binary and the build binary
+ bld.SAMBA_SUBSYSTEM(obj_target,
+ source = source,
+ deps = deps,
+ includes = includes,
+ cflags = cflags,
+ group = group,
+ autoproto = autoproto,
+ subsystem_name = subsystem_name,
+ needs_python = needs_python,
+ local_include = local_include,
+ use_hostcc = use_hostcc,
+ use_global_deps= use_global_deps)
bld.SET_BUILD_GROUP(group)
- bld(
- features = 'cc cprogram',
- source = source,
- target = binname,
- uselib_local = localdeps,
- uselib = sysdeps,
- includes = ilist + ' . #',
- ccflags = CURRENT_CFLAGS(bld, cflags),
- add_objects = add_objects,
- top=True)
-
- if not Options.is_install:
- bld(
- source = binname,
- rule = 'rm -f %s && cp ${SRC} .' % (binname),
- shell = True,
- after = 'cc_link',
- always = True,
- ext_in = '.bin',
- name = binname + ".copy",
- depends_on = binname
- )
-Build.BuildContext.SAMBA_BINARY = SAMBA_BINARY
-
-
-#################################################################
-# define a Samba python module
-def SAMBA_PYTHON(bld, name, source,
- deps='',
- public_deps='',
- realname=''):
-
- if not SET_TARGET_TYPE(bld, name, 'PYTHON'):
- return
-
- (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, name, deps)
-
- return
-Build.BuildContext.SAMBA_PYTHON = SAMBA_PYTHON
-
-#################################################################
-# define a Samba ET target
-def SAMBA_ERRTABLE(bld, name, source,
- options='',
- directory=''):
-# print "Skipping ERRTABLE rule for %s with source=%s" % (name, source)
-# return
- if not SET_TARGET_TYPE(bld, name, 'ET'):
- return
- bld.SET_BUILD_GROUP('build_source')
- bld(
- features = 'cc',
- source = source,
- target = name,
- includes = '# #source4/heimdal_build #source4 #lib/replace'
- )
-Build.BuildContext.SAMBA_ERRTABLE = SAMBA_ERRTABLE
+ # the binary itself will depend on that object target
+ deps = TO_LIST(deps)
+ deps.append(obj_target)
+ t = bld(
+ features = features,
+ source = [],
+ target = binname,
+ samba_cflags = CURRENT_CFLAGS(bld, binname, cflags),
+ samba_deps = deps,
+ samba_includes = includes,
+ local_include = local_include,
+ samba_modules = modules,
+ top = True,
+ samba_subsystem= subsystem_name,
+ install_path = None,
+ samba_inst_path= install_path,
+ samba_install = install
+ )
+ # setup the subsystem_name as an alias for the real
+ # binary name, so it can be found when expanding
+ # subsystem dependencies
+ if subsystem_name is not None:
+ bld.TARGET_ALIAS(subsystem_name, binname)
-################################################################
-# build a C prototype file automatically
-def AUTOPROTO(bld, header, source):
- if header is not None:
- bld.SET_BUILD_GROUP('prototypes')
- bld(
- source = source,
- target = header,
- rule = '../script/mkproto.pl --srcdir=.. --builddir=. --public=/dev/null --private=${TGT} ${SRC}'
- )
-Build.BuildContext.AUTOPROTO = AUTOPROTO
+Build.BuildContext.SAMBA_BINARY = SAMBA_BINARY
#################################################################
-# define a Samba module.
def SAMBA_MODULE(bld, modname, source,
deps='',
- includes='.',
+ includes='',
subsystem=None,
init_function=None,
autoproto=None,
+ autoproto_extra_source='',
aliases=None,
cflags='',
- output_type=None):
+ internal_module=True,
+ local_include=True,
+ vars=None,
+ enabled=True):
+ '''define a Samba module.'''
+
+ # we add the init function regardless of whether the module
+ # is enabled or not, as we need to generate a null list if
+ # all disabled
+ bld.ADD_INIT_FUNCTION(subsystem, modname, init_function)
+
+ if internal_module or BUILTIN_LIBRARY(bld, modname):
+ # treat internal modules as subsystems for now
+ SAMBA_SUBSYSTEM(bld, modname, source,
+ deps=deps,
+ includes=includes,
+ autoproto=autoproto,
+ autoproto_extra_source=autoproto_extra_source,
+ cflags=cflags,
+ local_include=local_include,
+ enabled=enabled)
+ return
- if not SET_TARGET_TYPE(bld, modname, 'MODULE'):
+ if not enabled:
+ SET_TARGET_TYPE(bld, modname, 'DISABLED')
return
+ source = bld.EXPAND_VARIABLES(source, vars=vars)
+ source = unique_list(TO_LIST(source))
+
# remember empty modules, so we can strip the dependencies
- if (source == '') or (source == []):
- LOCAL_CACHE_SET(bld, 'EMPTY_TARGETS', modname, True)
+ if ((source == '') or (source == [])) and deps == '' and public_deps == '':
+ SET_TARGET_TYPE(bld, modname, 'EMPTY')
return
- (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, modname, deps)
+ if not SET_TARGET_TYPE(bld, modname, 'MODULE'):
+ return
+
+ if subsystem is not None:
+ deps += ' ' + subsystem
- ilist = bld.SUBDIR(bld.curdir, includes) + ' ' + bld.SAMBA_LIBRARY_INCLUDE_LIST(deps)
- ilist = bld.NORMPATH(ilist)
bld.SET_BUILD_GROUP('main')
bld(
- features = 'cc',
- source = source,
- target=modname,
- ccflags = CURRENT_CFLAGS(bld, cflags),
- includes=ilist + ' . #')
+ features = 'cc',
+ source = source,
+ target = modname,
+ samba_cflags = CURRENT_CFLAGS(bld, modname, cflags),
+ samba_includes = includes,
+ local_include = local_include,
+ samba_deps = TO_LIST(deps)
+ )
+
+ if autoproto is not None:
+ bld.SAMBA_AUTOPROTO(autoproto, source + TO_LIST(autoproto_extra_source))
+
Build.BuildContext.SAMBA_MODULE = SAMBA_MODULE
#################################################################
-# define a Samba subsystem
def SAMBA_SUBSYSTEM(bld, modname, source,
deps='',
public_deps='',
- includes='.',
+ includes='',
public_headers=None,
+ header_path=None,
cflags='',
+ cflags_end=None,
group='main',
- config_option=None,
init_function_sentinal=None,
heimdal_autoproto=None,
+ heimdal_autoproto_options=None,
heimdal_autoproto_private=None,
autoproto=None,
- depends_on=''):
-
- if not SET_TARGET_TYPE(bld, modname, 'SUBSYSTEM'):
+ autoproto_extra_source='',
+ depends_on='',
+ local_include=True,
+ local_include_first=True,
+ subsystem_name=None,
+ enabled=True,
+ use_hostcc=False,
+ use_global_deps=True,
+ vars=None,
+ hide_symbols=False,
+ needs_python=False):
+ '''define a Samba subsystem'''
+
+ if not enabled:
+ SET_TARGET_TYPE(bld, modname, 'DISABLED')
return
- # if the caller specifies a config_option, then we create a blank
- # subsystem if that configuration option was found at configure time
- if (config_option is not None) and bld.CONFIG_SET(config_option):
- source = ''
-
# remember empty subsystems, so we can strip the dependencies
- if (source == '') or (source == []):
- LOCAL_CACHE_SET(bld, 'EMPTY_TARGETS', modname, True)
+ if ((source == '') or (source == [])) and deps == '' and public_deps == '':
+ SET_TARGET_TYPE(bld, modname, 'EMPTY')
return
- (sysdeps, localdeps, add_objects) = ADD_DEPENDENCIES(bld, modname, deps)
+ if not SET_TARGET_TYPE(bld, modname, 'SUBSYSTEM'):
+ return
+
+ source = bld.EXPAND_VARIABLES(source, vars=vars)
+ source = unique_list(TO_LIST(source))
+
+ deps += ' ' + public_deps
- ilist = bld.SUBDIR(bld.curdir, includes) + ' ' + bld.SAMBA_LIBRARY_INCLUDE_LIST(deps)
- ilist = bld.NORMPATH(ilist)
bld.SET_BUILD_GROUP(group)
+
+ features = 'cc'
+ if needs_python:
+ features += ' pyext'
+
t = bld(
- features = 'cc',
- source = source,
- target=modname,
- ccflags = CURRENT_CFLAGS(bld, cflags),
- includes=ilist + ' . #',
- depends_on=depends_on)
- LOCAL_CACHE_SET(bld, 'INCLUDE_LIST', modname, ilist)
+ features = features,
+ source = source,
+ target = modname,
+ samba_cflags = CURRENT_CFLAGS(bld, modname, cflags, hide_symbols=hide_symbols),
+ depends_on = depends_on,
+ samba_deps = TO_LIST(deps),
+ samba_includes = includes,
+ local_include = local_include,
+ local_include_first = local_include_first,
+ samba_subsystem= subsystem_name,
+ samba_use_hostcc = use_hostcc,
+ samba_use_global_deps = use_global_deps
+ )
+
+ if cflags_end is not None:
+ t.samba_cflags.extend(TO_LIST(cflags_end))
if heimdal_autoproto is not None:
- bld.HEIMDAL_AUTOPROTO(heimdal_autoproto, source)
+ bld.HEIMDAL_AUTOPROTO(heimdal_autoproto, source, options=heimdal_autoproto_options)
if heimdal_autoproto_private is not None:
bld.HEIMDAL_AUTOPROTO_PRIVATE(heimdal_autoproto_private, source)
if autoproto is not None:
- bld.SAMBA_AUTOPROTO(autoproto, source)
+ bld.SAMBA_AUTOPROTO(autoproto, source + TO_LIST(autoproto_extra_source))
+ if public_headers is not None:
+ bld.PUBLIC_HEADERS(public_headers, header_path=header_path)
return t
+
Build.BuildContext.SAMBA_SUBSYSTEM = SAMBA_SUBSYSTEM
-###############################################################
-# add a new set of build rules from a subdirectory
-# the @runonce decorator ensures we don't end up
-# with duplicate rules
-def BUILD_SUBDIR(bld, dir):
- path = os.path.normpath(bld.curdir + '/' + dir)
- cache = LOCAL_CACHE(bld, 'SUBDIR_LIST')
- if path in cache: return
- cache[path] = True
- debug("build: Processing subdirectory %s" % dir)
- bld.add_subdirs(dir)
+def SAMBA_GENERATOR(bld, name, rule, source='', target='',
+ group='generators', enabled=True,
+ public_headers=None,
+ header_path=None,
+ vars=None):
+ '''A generic source generator target'''
+
+ if not SET_TARGET_TYPE(bld, name, 'GENERATOR'):
+ return
-Build.BuildContext.BUILD_SUBDIR = BUILD_SUBDIR
+ if not enabled:
+ return
+
+ bld.SET_BUILD_GROUP(group)
+ t = bld(
+ rule=rule,
+ source=bld.EXPAND_VARIABLES(source, vars=vars),
+ target=target,
+ shell=isinstance(rule, str),
+ on_results=True,
+ before='cc',
+ ext_out='.c',
+ name=name)
+
+ if public_headers is not None:
+ bld.PUBLIC_HEADERS(public_headers, header_path=header_path)
+ return t
+Build.BuildContext.SAMBA_GENERATOR = SAMBA_GENERATOR
-##########################################################
-# add a new top level command to waf
-def ADD_COMMAND(opt, name, function):
- Utils.g_module.__dict__[name] = function
- opt.name = function
-Options.Handler.ADD_COMMAND = ADD_COMMAND
-###########################################################
-# setup build groups used to ensure that the different build
-# phases happen consecutively
@runonce
def SETUP_BUILD_GROUPS(bld):
+ '''setup build groups used to ensure that the different build
+ phases happen consecutively'''
bld.p_ln = bld.srcnode # we do want to see all targets!
bld.env['USING_BUILD_GROUPS'] = True
bld.add_group('setup')
+ bld.add_group('build_compiler_source')
bld.add_group('base_libraries')
+ bld.add_group('generators')
+ bld.add_group('compiler_prototypes')
+ bld.add_group('compiler_libraries')
bld.add_group('build_compilers')
bld.add_group('build_source')
bld.add_group('prototypes')
bld.add_group('main')
+ bld.add_group('binaries')
bld.add_group('final')
Build.BuildContext.SETUP_BUILD_GROUPS = SETUP_BUILD_GROUPS
-###########################################################
-# set the current build group
def SET_BUILD_GROUP(bld, group):
+ '''set the current build group'''
if not 'USING_BUILD_GROUPS' in bld.env:
return
bld.set_group(group)
Build.BuildContext.SET_BUILD_GROUP = SET_BUILD_GROUP
+
+
+@conf
+def ENABLE_TIMESTAMP_DEPENDENCIES(conf):
+ """use timestamps instead of file contents for deps
+ this currently doesn't work"""
+ def h_file(filename):
+ import stat
+ st = os.stat(filename)
+ if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
+ m = Utils.md5()
+ m.update(str(st.st_mtime))
+ m.update(str(st.st_size))
+ m.update(filename)
+ return m.digest()
+ Utils.h_file = h_file
+
+
+
+t = Task.simple_task_type('copy_script', 'rm -f ${LINK_TARGET} && ln -s ${SRC[0].abspath(env)} ${LINK_TARGET}',
+ shell=True, color='PINK', ext_in='.bin')
+t.quiet = True
+
+@feature('copy_script')
+@before('apply_link')
+def copy_script(self):
+ tsk = self.create_task('copy_script', self.allnodes[0])
+ tsk.env.TARGET = self.target
+
+def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None):
+ '''used to copy scripts from the source tree into the build directory
+ for use by selftest'''
+
+ source = bld.path.ant_glob(pattern)
+
+ bld.SET_BUILD_GROUP('build_source')
+ for s in TO_LIST(source):
+ iname = s
+ if installname != None:
+ iname = installname
+ target = os.path.join(installdir, iname)
+ tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target))
+ mkdir_p(tgtdir)
+ t = bld(features='copy_script',
+ source = s,
+ target = target,
+ always = True,
+ install_path = None)
+ t.env.LINK_TARGET = target
+
+Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT
+
+
+def install_file(bld, destdir, file, chmod=O644, flat=False,
+ python_fixup=False, destname=None, base_name=None):
+ '''install a file'''
+ destdir = bld.EXPAND_VARIABLES(destdir)
+ if not destname:
+ destname = file
+ if flat:
+ destname = os.path.basename(destname)
+ dest = os.path.join(destdir, destname)
+ if python_fixup:
+ # fixup the python path it will use to find Samba modules
+ inst_file = file + '.inst'
+ bld.SAMBA_GENERATOR('python_%s' % destname,
+ rule="sed 's|\(sys.path.insert.*\)bin/python\(.*\)$|\\1${PYTHONDIR}\\2|g' < ${SRC} > ${TGT}",
+ source=file,
+ target=inst_file)
+ file = inst_file
+ if base_name:
+ file = os.path.join(base_name, file)
+ bld.install_as(dest, file, chmod=chmod)
+
+
+def INSTALL_FILES(bld, destdir, files, chmod=O644, flat=False,
+ python_fixup=False, destname=None, base_name=None):
+ '''install a set of files'''
+ for f in TO_LIST(files):
+ install_file(bld, destdir, f, chmod=chmod, flat=flat,
+ python_fixup=python_fixup, destname=destname,
+ base_name=base_name)
+Build.BuildContext.INSTALL_FILES = INSTALL_FILES
+
+
+def INSTALL_WILDCARD(bld, destdir, pattern, chmod=O644, flat=False,
+ python_fixup=False, exclude=None, trim_path=None):
+ '''install a set of files matching a wildcard pattern'''
+ files=TO_LIST(bld.path.ant_glob(pattern))
+ if trim_path:
+ files2 = []
+ for f in files:
+ files2.append(os_path_relpath(f, trim_path))
+ files = files2
+
+ if exclude:
+ for f in files[:]:
+ if fnmatch.fnmatch(f, exclude):
+ files.remove(f)
+ INSTALL_FILES(bld, destdir, files, chmod=chmod, flat=flat,
+ python_fixup=python_fixup, base_name=trim_path)
+Build.BuildContext.INSTALL_WILDCARD = INSTALL_WILDCARD
+
+
+def INSTALL_DIRS(bld, destdir, dirs):
+ '''install a set of directories'''
+ destdir = bld.EXPAND_VARIABLES(destdir)
+ dirs = bld.EXPAND_VARIABLES(dirs)
+ for d in TO_LIST(dirs):
+ bld.install_dir(os.path.join(destdir, d))
+Build.BuildContext.INSTALL_DIRS = INSTALL_DIRS
+
+
+re_header = re.compile('#include[ \t]*"([^"]+)"', re.I | re.M)
+class header_task(Task.Task):
+ name = 'header'
+ color = 'PINK'
+ vars = ['INCLUDEDIR', 'HEADER_DEPS']
+ def run(self):
+ txt = self.inputs[0].read(self.env)
+
+ txt = txt.replace('#if _SAMBA_BUILD_ == 4', '#if 1\n')
+
+ themap = self.generator.bld.subst_table
+ def repl(m):
+ if m.group(1):
+ s = m.group(1)
+ return "#include <%s>" % themap.get(s, s)
+ return ''
+
+ txt = re_header.sub(repl, txt)
+
+ f = None
+ try:
+ f = open(self.outputs[0].abspath(self.env), 'w')
+ f.write(txt)
+ finally:
+ if f:
+ f.close()
+
+def init_subst(bld):
+ """
+ initialize the header substitution table
+ for now use the file headermap.txt but in the future we will compute the paths properly
+ """
+
+ if getattr(bld, 'subst_table', None):
+ return bld.subst_table_h
+
+ node = bld.srcnode.find_resource("source4/headermap.txt")
+ if not node:
+ return {}
+ lines = node.read(None)
+ bld.subst_table_h = hash(lines)
+ lines = [x.strip().split(': ') for x in lines.split('\n') if x.rfind(': ') > -1]
+ bld.subst_table = dict(lines)
+ return bld.subst_table_h
+
+@TaskGen.feature('pubh')
+def make_public_headers(self):
+ if not self.bld.is_install:
+ # install time only (lazy)
+ return
+
+ self.env['HEADER_DEPS'] = init_subst(self.bld)
+ # adds a dependency and trigger a rebuild if the dict changes
+
+ header_path = getattr(self, 'header_path', None) or ''
+
+ for x in self.to_list(self.headers):
+
+        # header_path may be a list of (patterns, dir) pairs; use the
+        # destination dir of the first pattern that matches this header
+ if isinstance(header_path, list):
+ add_dir = ''
+ for (p1, dir) in header_path:
+ lst = self.to_list(p1)
+ for p2 in lst:
+ if fnmatch.fnmatch(x, p2):
+ add_dir = dir
+ break
+ else:
+ continue
+ break
+ inst_path = add_dir
+ else:
+ inst_path = header_path
+
+ dest = ''
+ name = x
+ if x.find(':') != -1:
+ s = x.split(':')
+ name = s[0]
+ dest = s[1]
+
+ inn = self.path.find_resource(name)
+ if not inn:
+ raise ValueError("could not find the public header %r in %r" % (name, self.path))
+ out = inn.change_ext('.inst.h')
+ self.create_task('header', inn, out)
+
+ if not dest:
+ dest = inn.name
+
+ if inst_path:
+ inst_path = inst_path + '/'
+ inst_path = inst_path + dest
+
+ #print("going to install the headers", inst_path, out)
+ self.bld.install_as('${INCLUDEDIR}/%s' % inst_path, out, self.env)
+
+def PUBLIC_HEADERS(bld, public_headers, header_path=None):
+ '''install some headers
+
+ header_path may either be a string that is added to the INCLUDEDIR,
+ or it can be a dictionary of wildcard patterns which map to destination
+ directories relative to INCLUDEDIR
+ '''
+ bld.SET_BUILD_GROUP('final')
+ ret = bld(features=['pubh'], headers=public_headers, header_path=header_path)
+ return ret
+Build.BuildContext.PUBLIC_HEADERS = PUBLIC_HEADERS
+
+
+def subst_at_vars(task):
+    '''substitute @VAR@ style variables in a file'''
+ src = task.inputs[0].srcpath(task.env)
+ tgt = task.outputs[0].bldpath(task.env)
+
+ f = open(src, 'r')
+ s = f.read()
+ f.close()
+ # split on the vars
+ a = re.split('(@\w+@)', s)
+ out = []
+ done_var = {}
+ back_sub = [ ('PREFIX', '${prefix}'), ('EXEC_PREFIX', '${exec_prefix}')]
+ for v in a:
+ if re.match('@\w+@', v):
+ vname = v[1:-1]
+ if not vname in task.env and vname.upper() in task.env:
+ vname = vname.upper()
+ if not vname in task.env:
+ Logs.error("Unknown substitution %s in %s" % (v, task.name))
+ sys.exit(1)
+ v = SUBST_VARS_RECURSIVE(task.env[vname], task.env)
+ # now we back substitute the allowed pc vars
+ for (b, m) in back_sub:
+ s = task.env[b]
+ if s == v[0:len(s)]:
+ if not b in done_var:
+ # we don't want to substitute the first usage
+ done_var[b] = True
+ else:
+ v = m + v[len(s):]
+ break
+ out.append(v)
+ contents = ''.join(out)
+ f = open(tgt, 'w')
+ s = f.write(contents)
+ f.close()
+ return 0
+
+
+
+def PKG_CONFIG_FILES(bld, pc_files, vnum=None):
+ '''install some pkg_config pc files'''
+ dest = '${PKGCONFIGDIR}'
+ dest = bld.EXPAND_VARIABLES(dest)
+ for f in TO_LIST(pc_files):
+ base=os.path.basename(f)
+ t = bld.SAMBA_GENERATOR('PKGCONFIG_%s' % base,
+ rule=subst_at_vars,
+ source=f+'.in',
+ target=f)
+ if vnum:
+ t.env.PACKAGE_VERSION = vnum
+ INSTALL_FILES(bld, dest, f, flat=True, destname=base)
+Build.BuildContext.PKG_CONFIG_FILES = PKG_CONFIG_FILES
+
+
+
+#############################################################
+# give a nicer display when building different types of files
+def progress_display(self, msg, fname):
+ col1 = Logs.colors(self.color)
+ col2 = Logs.colors.NORMAL
+ total = self.position[1]
+ n = len(str(total))
+ fs = '[%%%dd/%%%dd] %s %%s%%s%%s\n' % (n, n, msg)
+ return fs % (self.position[0], self.position[1], col1, fname, col2)
+
+def link_display(self):
+ if Options.options.progress_bar != 0:
+ return Task.Task.old_display(self)
+ fname = self.outputs[0].bldpath(self.env)
+ return progress_display(self, 'Linking', fname)
+Task.TaskBase.classes['cc_link'].display = link_display
+
+def samba_display(self):
+ if Options.options.progress_bar != 0:
+ return Task.Task.old_display(self)
+
+ targets = LOCAL_CACHE(self, 'TARGET_TYPE')
+ if self.name in targets:
+ target_type = targets[self.name]
+ type_map = { 'GENERATOR' : 'Generating',
+ 'PROTOTYPE' : 'Generating'
+ }
+ if target_type in type_map:
+ return progress_display(self, type_map[target_type], self.name)
+
+ fname = self.inputs[0].bldpath(self.env)
+ if fname[0:3] == '../':
+ fname = fname[3:]
+ ext_loc = fname.rfind('.')
+ if ext_loc == -1:
+ return Task.Task.old_display(self)
+ ext = fname[ext_loc:]
+
+ ext_map = { '.idl' : 'Compiling IDL',
+ '.et' : 'Compiling ERRTABLE',
+ '.asn1': 'Compiling ASN1',
+ '.c' : 'Compiling' }
+ if ext in ext_map:
+ return progress_display(self, ext_map[ext], fname)
+ return Task.Task.old_display(self)
+
+Task.TaskBase.classes['Task'].old_display = Task.TaskBase.classes['Task'].display
+Task.TaskBase.classes['Task'].display = samba_display