# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
-import Build, os, Options, Task, Utils, cc, TaskGen, fnmatch, re, shutil, Logs
+import Build, os, sys, Options, Task, Utils, cc, TaskGen, fnmatch, re, shutil, Logs, Constants
from Configure import conf
from Logs import debug
from samba_utils import SUBST_VARS_RECURSIVE
from samba_bundled import *
import samba_install
import samba_conftests
+import samba_abi
import tru64cc
+import irixcc
+import generic_cc
+import samba_dist
+import samba_wildcard
+
+# 420 == 0644; written in decimal so the literal parses under both the
+# python2 (0644) and python3 (0o644) octal syntaxes
+O644 = 420
# some systems have broken threading in python
if os.environ.get('WAF_NOTHREADS') == '1':
    os.putenv('PYTHONUNBUFFERED', '1')
+
+# refuse to run under a system-installed waf: the build relies on the
+# bundled version (sys.exit requires the explicit sys import added above)
+if Constants.HEXVERSION < 0x105016:
+    Logs.error('''
+Please use the version of waf that comes with Samba, not
+a system installed version. See http://wiki.samba.org/index.php/Waf
+for details.
+
+Alternatively, please use ./autogen-waf.sh, and then
+run ./configure and make as usual. That will call the right version of waf.
+''')
+    sys.exit(1)
+
+
+
@conf
def SAMBA_BUILD_ENV(conf):
'''create the samba build environment'''
- conf.env['BUILD_DIRECTORY'] = conf.blddir
+ conf.env.BUILD_DIRECTORY = conf.blddir
mkdir_p(os.path.join(conf.blddir, LIB_PATH))
mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc'))
# this allows all of the bin/shared and bin/python targets
# to be expressed in terms of build directory paths
+ mkdir_p(os.path.join(conf.blddir, 'default'))
for p in ['python','shared']:
link_target = os.path.join(conf.blddir, 'default/' + p)
if not os.path.lexists(link_target):
target_type='LIBRARY',
bundled_extension=True,
link_name=None,
+ abi_file=None,
+ abi_match=None,
+ hide_symbols=False,
enabled=True):
'''define a Samba library'''
source = bld.EXPAND_VARIABLES(source, vars=vars)
# remember empty libraries, so we can strip the dependencies
- if (source == '') or (source == []):
+ if ((source == '') or (source == [])) and deps == '' and public_deps == '':
SET_TARGET_TYPE(bld, libname, 'EMPTY')
return
autoproto = autoproto,
depends_on = depends_on,
needs_python = needs_python,
+ hide_symbols = hide_symbols,
local_include = local_include)
if libname == obj_target:
deps = TO_LIST(deps)
deps.append(obj_target)
- if target_type == 'PYTHON':
+ if target_type == 'PYTHON' or realname:
bundled_name = libname
else:
bundled_name = BUNDLED_NAME(bld, libname, bundled_extension)
features += ' pyext'
elif needs_python:
features += ' pyembed'
+ if abi_file:
+ features += ' abi_check'
+
+ if abi_file:
+ abi_file = os.path.join(bld.curdir, abi_file)
bld.SET_BUILD_GROUP(group)
t = bld(
samba_inst_path = install_path,
name = libname,
samba_realname = realname,
- samba_install = install
+ samba_install = install,
+ abi_file = abi_file,
+ abi_match = abi_match
)
+ if realname and not link_name:
+ link_name = 'shared/%s' % realname
+
if link_name:
t.link_name = link_name
- if autoproto is not None:
- bld.SAMBA_AUTOPROTO(autoproto, source)
-
- if public_headers is not None:
- bld.PUBLIC_HEADERS(public_headers, header_path=header_path)
-
if pc_files is not None:
bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)
public_headers=None,
header_path=None,
modules=None,
- installdir=None,
ldflags=None,
cflags='',
autoproto=None,
- use_hostcc=None,
+ use_hostcc=False,
+ use_global_deps=True,
compiler=None,
group='binaries',
manpages=None,
needs_python=False,
vars=None,
install=True,
- install_path=None):
+ install_path=None,
+ enabled=True):
'''define a Samba binary'''
+ if not enabled:
+ SET_TARGET_TYPE(bld, binname, 'DISABLED')
+ return
+
if not SET_TARGET_TYPE(bld, binname, 'BINARY'):
return
obj_target = binname + '.objlist'
source = bld.EXPAND_VARIABLES(source, vars=vars)
+ source = unique_list(TO_LIST(source))
# first create a target for building the object files for this binary
# by separating in this way, we avoid recompiling the C files
autoproto = autoproto,
subsystem_name = subsystem_name,
needs_python = needs_python,
- local_include = local_include)
+ local_include = local_include,
+ use_hostcc = use_hostcc,
+ use_global_deps= use_global_deps)
bld.SET_BUILD_GROUP(group)
if subsystem_name is not None:
bld.TARGET_ALIAS(subsystem_name, binname)
- if autoproto is not None:
- bld.SAMBA_AUTOPROTO(autoproto, source)
- if public_headers is not None:
- bld.PUBLIC_HEADERS(public_headers, header_path=header_path)
Build.BuildContext.SAMBA_BINARY = SAMBA_BINARY
return
source = bld.EXPAND_VARIABLES(source, vars=vars)
+ source = unique_list(TO_LIST(source))
# remember empty modules, so we can strip the dependencies
- if (source == '') or (source == []):
+ if ((source == '') or (source == [])) and deps == '' and public_deps == '':
SET_TARGET_TYPE(bld, modname, 'EMPTY')
return
)
if autoproto is not None:
- bld.SAMBA_AUTOPROTO(autoproto, source + ' ' + autoproto_extra_source)
+ bld.SAMBA_AUTOPROTO(autoproto, source + TO_LIST(autoproto_extra_source))
Build.BuildContext.SAMBA_MODULE = SAMBA_MODULE
local_include_first=True,
subsystem_name=None,
enabled=True,
+ use_hostcc=False,
+ use_global_deps=True,
vars=None,
+ hide_symbols=False,
needs_python=False):
'''define a Samba subsystem'''
return
# remember empty subsystems, so we can strip the dependencies
- if (source == '') or (source == []):
+ if ((source == '') or (source == [])) and deps == '' and public_deps == '':
SET_TARGET_TYPE(bld, modname, 'EMPTY')
return
return
source = bld.EXPAND_VARIABLES(source, vars=vars)
+ source = unique_list(TO_LIST(source))
deps += ' ' + public_deps
features = features,
source = source,
target = modname,
- samba_cflags = CURRENT_CFLAGS(bld, modname, cflags),
+ samba_cflags = CURRENT_CFLAGS(bld, modname, cflags, hide_symbols=hide_symbols),
depends_on = depends_on,
samba_deps = TO_LIST(deps),
samba_includes = includes,
local_include = local_include,
local_include_first = local_include_first,
- samba_subsystem= subsystem_name
+ samba_subsystem= subsystem_name,
+ samba_use_hostcc = use_hostcc,
+ samba_use_global_deps = use_global_deps
)
if cflags_end is not None:
if heimdal_autoproto_private is not None:
bld.HEIMDAL_AUTOPROTO_PRIVATE(heimdal_autoproto_private, source)
if autoproto is not None:
- bld.SAMBA_AUTOPROTO(autoproto, source + ' ' + autoproto_extra_source)
+ bld.SAMBA_AUTOPROTO(autoproto, source + TO_LIST(autoproto_extra_source))
if public_headers is not None:
bld.PUBLIC_HEADERS(public_headers, header_path=header_path)
return t
Build.BuildContext.SAMBA_SUBSYSTEM = SAMBA_SUBSYSTEM
-def SAMBA_GENERATOR(bld, name, rule, source, target,
+def SAMBA_GENERATOR(bld, name, rule, source='', target='',
group='generators', enabled=True,
public_headers=None,
header_path=None,
-def BUILD_SUBDIR(bld, dir):
- '''add a new set of build rules from a subdirectory'''
- path = os.path.normpath(bld.curdir + '/' + dir)
- cache = LOCAL_CACHE(bld, 'SUBDIR_LIST')
- if path in cache: return
- cache[path] = True
- debug("build: Processing subdirectory %s" % dir)
- bld.add_subdirs(dir)
-Build.BuildContext.BUILD_SUBDIR = BUILD_SUBDIR
-
-
-
@runonce
def SETUP_BUILD_GROUPS(bld):
'''setup build groups used to ensure that the different build
Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT
-def install_file(bld, destdir, file, chmod=0644, flat=False,
+def install_file(bld, destdir, file, chmod=O644, flat=False,
python_fixup=False, destname=None, base_name=None):
'''install a file'''
destdir = bld.EXPAND_VARIABLES(destdir)
bld.install_as(dest, file, chmod=chmod)
-def INSTALL_FILES(bld, destdir, files, chmod=0644, flat=False,
+def INSTALL_FILES(bld, destdir, files, chmod=O644, flat=False,
python_fixup=False, destname=None, base_name=None):
'''install a set of files'''
for f in TO_LIST(files):
Build.BuildContext.INSTALL_FILES = INSTALL_FILES
-def INSTALL_WILDCARD(bld, destdir, pattern, chmod=0644, flat=False,
+def INSTALL_WILDCARD(bld, destdir, pattern, chmod=O644, flat=False,
python_fixup=False, exclude=None, trim_path=None):
'''install a set of files matching a wildcard pattern'''
files=TO_LIST(bld.path.ant_glob(pattern))
Build.BuildContext.INSTALL_WILDCARD = INSTALL_WILDCARD
+def INSTALL_DIRS(bld, destdir, dirs):
+    '''install a set of directories
+
+    both arguments may contain build variables, which are expanded via
+    EXPAND_VARIABLES; dirs is split with TO_LIST, so it may be given as a
+    whitespace-separated string or as a list of names relative to destdir
+    '''
+    destdir = bld.EXPAND_VARIABLES(destdir)
+    dirs = bld.EXPAND_VARIABLES(dirs)
+    for d in TO_LIST(dirs):
+        bld.install_dir(os.path.join(destdir, d))
+# expose as a build-context method, like the other SAMBA install helpers
+Build.BuildContext.INSTALL_DIRS = INSTALL_DIRS
+
+
+# matches quoted #include "..." directives only; <...> includes are untouched
+re_header = re.compile('#include[ \t]*"([^"]+)"', re.I | re.M)
+class header_task(Task.Task):
+    # rewrites one public header for installation: samba4-only guards are
+    # forced on, and quoted includes are remapped to the installed layout
+    name = 'header'
+    color = 'PINK'
+    # HEADER_DEPS carries the headermap hash so tasks re-run when the map changes
+    vars = ['INCLUDEDIR', 'HEADER_DEPS']
+    def run(self):
+        txt = self.inputs[0].read(self.env)
+
+        # enable the samba4-only sections in the installed copy
+        # NOTE(review): the replacement embeds a '\n', so anything after the
+        # matched fragment on the same line is pushed to a new line — confirm
+        # that headers never carry trailing text on such #if lines
+        txt = txt.replace('#if _SAMBA_BUILD_ == 4', '#if 1\n')
+
+        # remap each quoted include through bld.subst_table (built by
+        # init_subst from source4/headermap.txt) and emit it in <...> form;
+        # paths missing from the table are kept as-is
+        themap = self.generator.bld.subst_table
+        def repl(m):
+            if m.group(1):
+                s = m.group(1)
+                return "#include <%s>" % themap.get(s, s)
+            return ''
+
+        txt = re_header.sub(repl, txt)
+
+        # write the transformed header to the output node
+        f = None
+        try:
+            f = open(self.outputs[0].abspath(self.env), 'w')
+            f.write(txt)
+        finally:
+            if f:
+                f.close()
+
+def init_subst(bld):
+    """
+    initialize the header substitution table
+    for now use the file headermap.txt but in the future we will compute the paths properly
+
+    Returns a hash of the headermap file contents; header tasks store it in
+    HEADER_DEPS so they rebuild whenever the map changes.
+    """
+
+    # table already built on this build context: return the cached hash
+    if getattr(bld, 'subst_table', None):
+        return bld.subst_table_h
+
+    node = bld.srcnode.find_resource("source4/headermap.txt")
+    if not node:
+        # NOTE(review): this path returns a dict while the path below returns
+        # an int hash, and it leaves bld.subst_table unset even though
+        # header_task.run reads it — presumably headermap.txt is always
+        # present in the tree; confirm
+        return {}
+    lines = node.read(None)
+    bld.subst_table_h = hash(lines)
+    # each useful line has the form "old-path: new-path"
+    lines = [x.strip().split(': ') for x in lines.split('\n') if x.rfind(': ') > -1]
+    bld.subst_table = dict(lines)
+    return bld.subst_table_h
+
+@TaskGen.feature('pubh')
+def make_public_headers(self):
+    # task-generator method for the 'pubh' feature: create one 'header'
+    # rewrite task per public header and schedule its output for installation
+    if not self.bld.is_install:
+        # install time only (lazy)
+        return
+
+    self.env['HEADER_DEPS'] = init_subst(self.bld)
+    # adds a dependency and trigger a rebuild if the dict changes
+
+    header_path = getattr(self, 'header_path', None) or ''
+
+    for x in self.to_list(self.headers):
+
+        # too complicated, but what was the original idea?
+        # (a list-valued header_path maps wildcard patterns to destination
+        # subdirectories; the first pattern matching this header wins)
+        if isinstance(header_path, list):
+            add_dir = ''
+            for (p1, dir) in header_path:
+                lst = self.to_list(p1)
+                for p2 in lst:
+                    if fnmatch.fnmatch(x, p2):
+                        add_dir = dir
+                        break
+                else:
+                    continue
+                break
+            inst_path = add_dir
+        else:
+            inst_path = header_path
+
+        # "source.h:dest.h" syntax renames the header on installation
+        dest = ''
+        name = x
+        if x.find(':') != -1:
+            s = x.split(':')
+            name = s[0]
+            dest = s[1]
+
+        inn = self.path.find_resource(name)
+        if not inn:
+            raise ValueError("could not find the public header %r in %r" % (name, self.path))
+        out = inn.change_ext('.inst.h')
+        self.create_task('header', inn, out)
+
+        if not dest:
+            dest = inn.name
+
+        if inst_path:
+            inst_path = inst_path + '/'
+        inst_path = inst_path + dest
+
+        #print("going to install the headers", inst_path, out)
+        self.bld.install_as('${INCLUDEDIR}/%s' % inst_path, out, self.env)
+
def PUBLIC_HEADERS(bld, public_headers, header_path=None):
    '''install some headers
or it can be a dictionary of wildcard patterns which map to destination
directories relative to INCLUDEDIR
    '''
-    dest = '${INCLUDEDIR}'
-    if isinstance(header_path, str):
-        dest += '/' + header_path
-    for h in TO_LIST(public_headers):
-        hdest = dest
-        if isinstance(header_path, list):
-            for (p1, dir) in header_path:
-                found_match=False
-                lst = TO_LIST(p1)
-                for p2 in lst:
-                    if fnmatch.fnmatch(h, p2):
-                        if dir:
-                            hdest = os.path.join(hdest, dir)
-                        found_match=True
-                        break
-                if found_match: break
-        if h.find(':') != -1:
-            hs=h.split(':')
-            INSTALL_FILES(bld, hdest, hs[0], flat=True, destname=hs[1])
-        else:
-            INSTALL_FILES(bld, hdest, h, flat=True)
+    # installation is now delegated to the 'pubh' task-generator feature
+    # (make_public_headers), which rewrites the headers at install time
+    bld.SET_BUILD_GROUP('final')
+    ret = bld(features=['pubh'], headers=public_headers, header_path=header_path)
+    return ret
Build.BuildContext.PUBLIC_HEADERS = PUBLIC_HEADERS
# split on the vars
a = re.split('(@\w+@)', s)
out = []
+ done_var = {}
+ back_sub = [ ('PREFIX', '${prefix}'), ('EXEC_PREFIX', '${exec_prefix}')]
for v in a:
if re.match('@\w+@', v):
vname = v[1:-1]
if not vname in task.env and vname.upper() in task.env:
vname = vname.upper()
if not vname in task.env:
- print "Unknown substitution %s in %s" % (v, task.name)
- raise
- v = task.env[vname]
+ Logs.error("Unknown substitution %s in %s" % (v, task.name))
+ sys.exit(1)
+ v = SUBST_VARS_RECURSIVE(task.env[vname], task.env)
+ # now we back substitute the allowed pc vars
+ for (b, m) in back_sub:
+ s = task.env[b]
+ if s == v[0:len(s)]:
+ if not b in done_var:
+ # we don't want to substitute the first usage
+ done_var[b] = True
+ else:
+ v = m + v[len(s):]
+ break
out.append(v)
contents = ''.join(out)
f = open(tgt, 'w')