summaryrefslogtreecommitdiffstats
path: root/buildtools
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 16:44:12 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 16:44:12 +0000
commitcda5d340e78555d2542be9264137319c5308e798 (patch)
tree7bc11f3311859ceee697bba8b60a5cb79a03473b /buildtools
parentInitial commit. (diff)
downloadtalloc-cda5d340e78555d2542be9264137319c5308e798.tar.xz
talloc-cda5d340e78555d2542be9264137319c5308e798.zip
Adding upstream version 2.4.2.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'buildtools')
-rw-r--r--buildtools/README12
-rwxr-xr-xbuildtools/bin/waf166
-rwxr-xr-xbuildtools/compare_config_h4.sh12
-rwxr-xr-xbuildtools/compare_generated.sh49
-rwxr-xr-xbuildtools/compare_install.sh8
-rw-r--r--buildtools/devel_env.sh7
-rwxr-xr-xbuildtools/examples/run_on_target.py148
-rw-r--r--buildtools/scripts/Makefile.waf72
-rwxr-xr-xbuildtools/scripts/abi_gen.sh26
-rwxr-xr-xbuildtools/scripts/autogen-waf.sh27
-rwxr-xr-xbuildtools/scripts/configure.waf21
-rw-r--r--buildtools/wafsamba/README8
-rw-r--r--buildtools/wafsamba/__init__.py0
-rw-r--r--buildtools/wafsamba/configure_file.py41
-rw-r--r--buildtools/wafsamba/generic_cc.py70
-rw-r--r--buildtools/wafsamba/pkgconfig.py68
-rw-r--r--buildtools/wafsamba/samba3.py111
-rw-r--r--buildtools/wafsamba/samba_abi.py334
-rw-r--r--buildtools/wafsamba/samba_autoconf.py1023
-rw-r--r--buildtools/wafsamba/samba_autoproto.py24
-rw-r--r--buildtools/wafsamba/samba_bundled.py273
-rw-r--r--buildtools/wafsamba/samba_conftests.py529
-rw-r--r--buildtools/wafsamba/samba_cross.py175
-rw-r--r--buildtools/wafsamba/samba_deps.py1314
-rw-r--r--buildtools/wafsamba/samba_dist.py280
-rw-r--r--buildtools/wafsamba/samba_git.py58
-rw-r--r--buildtools/wafsamba/samba_headers.py181
-rw-r--r--buildtools/wafsamba/samba_install.py236
-rw-r--r--buildtools/wafsamba/samba_patterns.py234
-rw-r--r--buildtools/wafsamba/samba_perl.py58
-rw-r--r--buildtools/wafsamba/samba_pidl.py175
-rw-r--r--buildtools/wafsamba/samba_python.py157
-rw-r--r--buildtools/wafsamba/samba_third_party.py48
-rw-r--r--buildtools/wafsamba/samba_utils.py754
-rw-r--r--buildtools/wafsamba/samba_version.py268
-rw-r--r--buildtools/wafsamba/samba_waf18.py433
-rw-r--r--buildtools/wafsamba/samba_wildcard.py151
-rw-r--r--buildtools/wafsamba/stale_files.py114
-rw-r--r--buildtools/wafsamba/symbols.py659
-rwxr-xr-xbuildtools/wafsamba/test_duplicate_symbol.sh15
-rw-r--r--buildtools/wafsamba/tests/__init__.py35
-rw-r--r--buildtools/wafsamba/tests/test_abi.py134
-rw-r--r--buildtools/wafsamba/tests/test_bundled.py27
-rw-r--r--buildtools/wafsamba/tests/test_utils.py76
-rw-r--r--buildtools/wafsamba/wafsamba.py1227
-rw-r--r--buildtools/wafsamba/wscript737
46 files changed, 10575 insertions, 0 deletions
diff --git a/buildtools/README b/buildtools/README
new file mode 100644
index 0000000..eab0382
--- /dev/null
+++ b/buildtools/README
@@ -0,0 +1,12 @@
+See http://code.google.com/p/waf/ for more information on waf
+
+You can get a svn copy of the upstream source with:
+
+ svn checkout http://waf.googlecode.com/svn/trunk/ waf-read-only
+
+Samba currently uses waf 1.5, which can be found at:
+
+ http://waf.googlecode.com/svn/branches/waf-1.5
+
+To update the current copy of waf, use the update-waf.sh script in this
+directory.
diff --git a/buildtools/bin/waf b/buildtools/bin/waf
new file mode 100755
index 0000000..0f70fa2
--- /dev/null
+++ b/buildtools/bin/waf
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+# encoding: latin-1
+# Thomas Nagy, 2005-2018
+#
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import os, sys, inspect
+
+VERSION="2.0.26"
+REVISION="x"
+GIT="x"
+INSTALL="x"
+C1='x'
+C2='x'
+C3='x'
+cwd = os.getcwd()
+join = os.path.join
+
+if sys.hexversion<0x206000f:
+ raise ImportError('Python >= 2.6 is required to create the waf file')
+
+WAF='waf'
+def b(x):
+ return x
+if sys.hexversion>0x300000f:
+ WAF='waf3'
+ def b(x):
+ return x.encode()
+
+def err(m):
+ print(('\033[91mError: %s\033[0m' % m))
+ sys.exit(1)
+
+def unpack_wafdir(dir, src):
+ f = open(src,'rb')
+ c = 'corrupt archive (%d)'
+ while 1:
+ line = f.readline()
+ if not line: err('run waf-light from a folder containing waflib')
+ if line == b('#==>\n'):
+ txt = f.readline()
+ if not txt: err(c % 1)
+ if f.readline() != b('#<==\n'): err(c % 2)
+ break
+ if not txt: err(c % 3)
+ txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
+
+ import shutil, tarfile
+ try: shutil.rmtree(dir)
+ except OSError: pass
+ try:
+ for x in ('Tools', 'extras'):
+ os.makedirs(join(dir, 'waflib', x))
+ except OSError:
+ err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
+
+ os.chdir(dir)
+ tmp = 't.bz2'
+ t = open(tmp,'wb')
+ try: t.write(txt)
+ finally: t.close()
+
+ try:
+ t = tarfile.open(tmp)
+ except:
+ try:
+ os.system('bunzip2 t.bz2')
+ t = tarfile.open('t')
+ tmp = 't'
+ except:
+ os.chdir(cwd)
+ try: shutil.rmtree(dir)
+ except OSError: pass
+ err("Waf cannot be unpacked, check that bzip2 support is present")
+
+ try:
+ for x in t: t.extract(x)
+ finally:
+ t.close()
+
+ for x in ('Tools', 'extras'):
+ os.chmod(join('waflib',x), 493)
+
+ if sys.hexversion<0x300000f:
+ sys.path = [join(dir, 'waflib')] + sys.path
+ import fixpy2
+ fixpy2.fixdir(dir)
+
+ os.remove(tmp)
+ os.chdir(cwd)
+
+ try: dir = unicode(dir, 'mbcs')
+ except: pass
+ try:
+ from ctypes import windll
+ windll.kernel32.SetFileAttributesW(dir, 2)
+ except:
+ pass
+
+def test(dir):
+ try:
+ os.stat(join(dir, 'waflib'))
+ return os.path.abspath(dir)
+ except OSError:
+ pass
+
+def find_lib():
+ path = '../../third_party/waf'
+ paths = [path, path+'/waflib']
+ return [os.path.abspath(os.path.join(os.path.dirname(__file__), x)) for x in paths]
+
+wafdir = find_lib()
+for p in wafdir:
+ sys.path.insert(0, p)
+
+if __name__ == '__main__':
+ #import extras.compat15#PRELUDE
+ import sys
+
+ from waflib.Tools import ccroot, c, ar, compiler_c, gcc
+ sys.modules['cc'] = c
+ sys.modules['ccroot'] = ccroot
+ sys.modules['ar'] = ar
+ sys.modules['compiler_cc'] = compiler_c
+ sys.modules['gcc'] = gcc
+
+ from waflib import Options
+ Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
+ if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0:
+ os.environ['NOCLIMB'] = "1"
+ # there is a single top-level, but libraries must build independently
+ os.environ['NO_LOCK_IN_TOP'] = "1"
+
+ from waflib import Task
+ class o(object):
+ display = None
+ Task.classes['cc_link'] = o
+
+ from waflib import Scripting
+ Scripting.waf_entry_point(cwd, VERSION, wafdir[0])
diff --git a/buildtools/compare_config_h4.sh b/buildtools/compare_config_h4.sh
new file mode 100755
index 0000000..fee8abf
--- /dev/null
+++ b/buildtools/compare_config_h4.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+# compare the generated config.h from a waf build with existing samba
+# build
+
+grep "^.define" bin/default/source4/include/config.h | sort >waf-config.h
+grep "^.define" $HOME/samba_old/source4/include/config.h | sort >old-config.h
+
+comm -23 old-config.h waf-config.h
+
+#echo
+#diff -u old-config.h waf-config.h
diff --git a/buildtools/compare_generated.sh b/buildtools/compare_generated.sh
new file mode 100755
index 0000000..e626579
--- /dev/null
+++ b/buildtools/compare_generated.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+
+# compare the generated files from a waf
+
+old_build=$HOME/samba_old
+
+gen_files=$(cd bin/default && find . -type f -name '*.[ch]')
+
+2>&1
+
+strip_file()
+{
+ in_file=$1
+ out_file=$2
+ cat $in_file |
+ grep -v 'The following definitions come from' |
+ grep -v 'Automatically generated at' |
+ grep -v 'Generated from' |
+ sed 's|/home/tnagy/samba/source4||g' |
+ sed 's|/home/tnagy/samba/|../|g' |
+ sed 's|bin/default/source4/||g' |
+ sed 's|bin/default/|../|g' |
+ sed 's/define _____/define ___/g' |
+ sed 's/define __*/define _/g' |
+ sed 's/define _DEFAULT_/define _/g' |
+ sed 's/define _SOURCE4_/define ___/g' |
+ sed 's/define ___/define _/g' |
+ sed 's/ifndef ___/ifndef _/g' |
+ sed 's|endif /* ____|endif /* __|g' |
+ sed s/__DEFAULT_SOURCE4/__/ |
+ sed s/__DEFAULT_SOURCE4/__/ |
+ sed s/__DEFAULT/____/ >$out_file
+}
+
+compare_file()
+{
+	f=$1
+ bname=$(basename $f)
+ t1=/tmp/$bname.old.$$
+ t2=/tmp/$bname.new.$$
+ strip_file $old_build/$f $t1
+ strip_file bin/default/$f $t2
+ diff -u -b $t1 $t2 2>&1
+ rm -f $t1 $t2
+}
+
+for f in $gen_files; do
+ compare_file $f
+done
diff --git a/buildtools/compare_install.sh b/buildtools/compare_install.sh
new file mode 100755
index 0000000..37772a4
--- /dev/null
+++ b/buildtools/compare_install.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+prefix1="$1"
+prefix2="$2"
+
+(cd $prefix1 && find .) | sort >p1.txt
+(cd $prefix2 && find .) | sort >p2.txt
+diff -u p[12].txt
diff --git a/buildtools/devel_env.sh b/buildtools/devel_env.sh
new file mode 100644
index 0000000..9f87a4a
--- /dev/null
+++ b/buildtools/devel_env.sh
@@ -0,0 +1,7 @@
+# This file can be sourced using
+#
+# source buildtools/devel_env.sh
+
+# Setup python path for lsp server
+PYTHONPATH="$(pwd)/third_party/waf:$(pwd)/python:$(pwd)/bin/python:$(pwd)/selftest:${PYTHONPATH}"
+export PYTHONPATH
diff --git a/buildtools/examples/run_on_target.py b/buildtools/examples/run_on_target.py
new file mode 100755
index 0000000..0c9ac5a
--- /dev/null
+++ b/buildtools/examples/run_on_target.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+
+#
+# Sample run-on-target script
+# This is a script that can be used as cross-execute parameter to samba
+# configuration process, running the command on a remote target for which
+# the cross-compiled configure test was compiled.
+#
+# To use:
+# ./configure \
+# --cross-compile \
+# '--cross-execute=./buildtools/example/run_on_target.py --host=<host>'
+#
+# A more elaborate example:
+# ./configure \
+# --cross-compile \
+# '--cross-execute=./buildtools/example/run_on_target.py --host=<host> --user=<user> "--ssh=ssh -i <some key file>" --destdir=/path/to/dir'
+#
+# Typically this is to be used also with --cross-answers, so that the
+# cross answers file gets built and further builds can be made without
+# the help of a remote target.
+#
+# The following assumptions are made:
+# 1. rsync is available on build machine and target machine
+# 2. A running ssh service on target machine with password-less shell login
+# 3. A directory writable by the password-less login user
+# 4. The tests on the target can run and provide reliable results
+# from the login account's home directory. This is significant
+# for example in locking tests which
+# create files in the current directory. As a workaround to this
+# assumption, the TESTDIR environment variable can be set on the target
+# (using ssh command line or server config) and the tests shall
+# chdir to that directory.
+#
+
+import sys
+import os
+import subprocess
+from optparse import OptionParser
+
+# these are defaults, but can be overridden using command line
+SSH = 'ssh'
+USER = None
+HOST = 'localhost'
+
+
+def xfer_files(ssh, srcdir, host, user, targ_destdir):
+ """Transfer executable files to target
+
+ Use rsync to copy the directory containing program to run
+ INTO a destination directory on the target. An exact copy
+ of the source directory is created on the target machine,
+ possibly deleting files on the target machine which do not
+ exist on the source directory.
+
+ The idea is that the test may include files in addition to
+ the compiled binary, and all of those files reside alongside
+ the binary in a source directory.
+
+ For example, if the test to run is /foo/bar/test and the
+ destination directory on the target is /tbaz, then /tbaz/bar
+ on the target shall be an exact copy of /foo/bar on the source,
+ including deletion of files inside /tbaz/bar which do not exist
+ on the source.
+ """
+
+ userhost = host
+ if user:
+ userhost = '%s@%s' % (user, host)
+
+ cmd = 'rsync --verbose -rl --ignore-times --delete -e "%s" %s %s:%s/' % \
+ (ssh, srcdir, userhost, targ_destdir)
+ p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (out, err) = p.communicate()
+ if p.returncode != 0:
+ raise Exception('failed syncing files\n stdout:\n%s\nstderr:%s\n'
+ % (out, err))
+
+
+def exec_remote(ssh, host, user, destdir, targdir, prog, args):
+ """Run a test on the target
+
+ Using password-less ssh, run the compiled binary on the target.
+
+ An assumption is that there's no need to cd into the target dir,
+ same as there's no need to do it on a native build.
+ """
+ userhost = host
+ if user:
+ userhost = '%s@%s' % (user, host)
+
+ cmd = '%s %s %s/%s/%s' % (ssh, userhost, destdir, targdir, prog)
+ if args:
+ cmd = cmd + ' ' + ' '.join(args)
+ p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (out, err) = p.communicate()
+ return (p.returncode, out)
+
+
+def main(argv):
+ usage = "usage: %prog [options] <prog> [args]"
+ parser = OptionParser(usage)
+
+ parser.add_option('--ssh', help="SSH client and additional flags",
+ default=SSH)
+ parser.add_option('--host', help="target host name or IP address",
+ default=HOST)
+ parser.add_option('--user', help="login user on target",
+ default=USER)
+ parser.add_option('--destdir', help="work directory on target",
+ default='~')
+
+ (options, args) = parser.parse_args(argv)
+ if len(args) < 1:
+ parser.error("please supply test program to run")
+
+ progpath = args[0]
+
+ # assume that a test that was not compiled fails (e.g. getconf)
+ if progpath[0] != '/':
+ return (1, "")
+
+ progdir = os.path.dirname(progpath)
+ prog = os.path.basename(progpath)
+ targ_progdir = os.path.basename(progdir)
+
+ xfer_files(
+ options.ssh,
+ progdir,
+ options.host,
+ options.user,
+ options.destdir)
+
+ (rc, out) = exec_remote(options.ssh,
+ options.host,
+ options.user,
+ options.destdir,
+ targ_progdir,
+ prog, args[1:])
+ return (rc, out)
+
+
+if __name__ == '__main__':
+ (rc, out) = main(sys.argv[1:])
+ sys.stdout.write(out)
+ sys.exit(rc)
diff --git a/buildtools/scripts/Makefile.waf b/buildtools/scripts/Makefile.waf
new file mode 100644
index 0000000..a15a5f8
--- /dev/null
+++ b/buildtools/scripts/Makefile.waf
@@ -0,0 +1,72 @@
+# simple makefile wrapper to run waf
+
+WAF_BINARY=$(PYTHON) BUILDTOOLS/bin/waf
+WAF=PYTHONHASHSEED=1 WAF_MAKE=1 $(WAF_BINARY)
+
+all:
+ $(WAF) build
+
+install:
+ $(WAF) install
+
+uninstall:
+ $(WAF) uninstall
+
+test:
+ $(WAF) test $(TEST_OPTIONS)
+
+help:
+ @echo NOTE: to run extended waf options use $(WAF_BINARY) or modify your PATH
+ $(WAF) --help
+
+testenv:
+ $(WAF) test --testenv $(TEST_OPTIONS)
+
+quicktest:
+ $(WAF) test --quick $(TEST_OPTIONS)
+
+dist:
+ $(WAF) dist
+
+distcheck:
+ $(WAF) distcheck
+
+clean:
+ $(WAF) clean
+
+distclean:
+ $(WAF) distclean
+
+reconfigure: configure
+ $(WAF) reconfigure
+
+show_waf_options:
+ $(WAF) --help
+
+# some compatibility make targets
+everything: all
+
+testsuite: all
+
+check: test
+
+torture: all
+
+# this should do an install as well, once install is finished
+installcheck: test
+
+etags:
+ $(WAF) etags
+
+ctags:
+ $(WAF) ctags
+
+bin/%:: FORCE
+ $(WAF) --targets=$@
+FORCE:
+
+configure: autogen-waf.sh BUILDTOOLS/scripts/configure.waf
+ ./autogen-waf.sh
+
+Makefile: autogen-waf.sh configure BUILDTOOLS/scripts/Makefile.waf
+ ./autogen-waf.sh
diff --git a/buildtools/scripts/abi_gen.sh b/buildtools/scripts/abi_gen.sh
new file mode 100755
index 0000000..c66a1b8
--- /dev/null
+++ b/buildtools/scripts/abi_gen.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+# generate a set of ABI signatures from a shared library
+
+SHAREDLIB="$1"
+
+GDBSCRIPT="gdb_syms.$$"
+
+(
+ cat <<EOF
+set height 0
+set width 0
+EOF
+
+ # On older linker versions _init|_fini symbols are not hidden.
+ objdump --dynamic-syms "${SHAREDLIB}" |
+ awk '$0 !~ /.hidden/ {if ($2 == "g" && $3 ~ /D(F|O)/ && $4 ~ /(.bss|.rodata|.text)/) print $NF}' |
+ sort |
+ while read -r s; do
+ echo "echo $s: "
+ echo p "${s}"
+ done
+) >$GDBSCRIPT
+
+# forcing the terminal avoids a problem on Fedora12
+TERM=none gdb -n -batch -x $GDBSCRIPT "$SHAREDLIB" </dev/null
+rm -f $GDBSCRIPT
diff --git a/buildtools/scripts/autogen-waf.sh b/buildtools/scripts/autogen-waf.sh
new file mode 100755
index 0000000..a0ed80c
--- /dev/null
+++ b/buildtools/scripts/autogen-waf.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+p=$(dirname $0)
+
+echo "Setting up for waf build"
+
+echo "Looking for the buildtools directory"
+
+d="buildtools"
+while test \! -d "$p/$d"; do d="../$d"; done
+
+echo "Found buildtools in $p/$d"
+
+echo "Setting up configure"
+rm -f $p/configure $p/include/config*.h*
+sed "s|BUILDTOOLS|$d|g;s|BUILDPATH|$p|g" <"$p/$d/scripts/configure.waf" >$p/configure
+chmod +x $p/configure
+
+echo "Setting up Makefile"
+rm -f $p/makefile $p/Makefile
+sed "s|BUILDTOOLS|$d|g" <"$p/$d/scripts/Makefile.waf" >$p/Makefile
+
+echo "done. Now run $p/configure or $p/configure.developer then make."
+if [ $p != "." ]; then
+ echo "Notice: The build invoke path is not 'source4'! Use make with the parameter"
+ echo "-C <'source4' path>. Example: make -C source4 all"
+fi
diff --git a/buildtools/scripts/configure.waf b/buildtools/scripts/configure.waf
new file mode 100755
index 0000000..ccb6284
--- /dev/null
+++ b/buildtools/scripts/configure.waf
@@ -0,0 +1,21 @@
+#!/bin/sh
+
+PREVPATH=$(dirname $0)
+
+WAF=BUILDTOOLS/bin/waf
+
+# using JOBS=1 gives maximum compatibility with
+# systems like AIX which have broken threading in python
+JOBS=1
+export JOBS
+
+# Make sure we don't have any library preloaded.
+unset LD_PRELOAD
+
+# Make sure we get stable hashes
+PYTHONHASHSEED=1
+export PYTHONHASHSEED
+
+cd BUILDPATH || exit 1
+$PYTHON $WAF configure "$@" || exit 1
+cd $PREVPATH
diff --git a/buildtools/wafsamba/README b/buildtools/wafsamba/README
new file mode 100644
index 0000000..1968b55
--- /dev/null
+++ b/buildtools/wafsamba/README
@@ -0,0 +1,8 @@
+This is a set of waf 'tools' to help make building the Samba
+components easier, by having common functions in one place. This gives
+us a more consistent build, and ensures that our project rules are
+obeyed
+
+
+TODO:
+ see http://wiki.samba.org/index.php/Waf
diff --git a/buildtools/wafsamba/__init__.py b/buildtools/wafsamba/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/buildtools/wafsamba/__init__.py
diff --git a/buildtools/wafsamba/configure_file.py b/buildtools/wafsamba/configure_file.py
new file mode 100644
index 0000000..98a58a4
--- /dev/null
+++ b/buildtools/wafsamba/configure_file.py
@@ -0,0 +1,41 @@
+# handle substitution of variables in .in files
+
+import sys
+import re
+import os
+from waflib import Build, Logs
+from samba_utils import SUBST_VARS_RECURSIVE
+
+def subst_at_vars(task):
+ '''substiture @VAR@ style variables in a file'''
+
+ env = task.env
+ s = task.inputs[0].read()
+
+ # split on the vars
+ a = re.split(r'(@\w+@)', s)
+ out = []
+ for v in a:
+ if re.match(r'@\w+@', v):
+ vname = v[1:-1]
+ if not vname in task.env and vname.upper() in task.env:
+ vname = vname.upper()
+ if not vname in task.env:
+ Logs.error("Unknown substitution %s in %s" % (v, task.name))
+ sys.exit(1)
+ v = SUBST_VARS_RECURSIVE(task.env[vname], task.env)
+ out.append(v)
+ contents = ''.join(out)
+ task.outputs[0].write(contents)
+ return 0
+
+def CONFIGURE_FILE(bld, in_file, **kwargs):
+ '''configure file'''
+
+ base=os.path.basename(in_file)
+ t = bld.SAMBA_GENERATOR('INFILE_%s' % base,
+ rule = subst_at_vars,
+ source = in_file + '.in',
+ target = in_file,
+ vars = kwargs)
+Build.BuildContext.CONFIGURE_FILE = CONFIGURE_FILE
diff --git a/buildtools/wafsamba/generic_cc.py b/buildtools/wafsamba/generic_cc.py
new file mode 100644
index 0000000..1352c54
--- /dev/null
+++ b/buildtools/wafsamba/generic_cc.py
@@ -0,0 +1,70 @@
+
+# compiler definition for a generic C compiler
+# based on suncc.py from waf
+
+import os, optparse
+from waflib import Errors
+from waflib.Tools import ccroot, ar
+from waflib.Configure import conf
+
+#
+# Let waflib provide useful defaults, but
+# provide generic_cc as last resort fallback on
+# all platforms
+#
+from waflib.Tools.compiler_c import c_compiler
+for key in c_compiler.keys():
+ c_compiler[key].append('generic_cc')
+
+@conf
+def find_generic_cc(conf):
+ v = conf.env
+ cc = None
+ if v.CC:
+ cc = v.CC
+ elif 'CC' in conf.environ:
+ cc = conf.environ['CC']
+ if not cc:
+ cc = conf.find_program('cc', var='CC')
+ if not cc:
+ conf.fatal('generic_cc was not found')
+
+ try:
+ conf.cmd_and_log(cc + ['--version'])
+ except Errors.WafError:
+ conf.fatal('%r --version could not be executed' % cc)
+
+ v.CC = cc
+ v.CC_NAME = 'generic_cc'
+
+@conf
+def generic_cc_common_flags(conf):
+ v = conf.env
+
+ v.CC_SRC_F = ''
+ v.CC_TGT_F = ['-c', '-o']
+ v.CPPPATH_ST = '-I%s'
+ v.DEFINES_ST = '-D%s'
+
+ if not v.LINK_CC:
+ v.LINK_CC = v.CC
+
+ v.CCLNK_SRC_F = ''
+ v.CCLNK_TGT_F = ['-o']
+
+ v.LIB_ST = '-l%s' # template for adding libs
+ v.LIBPATH_ST = '-L%s' # template for adding libpaths
+ v.STLIB_ST = '-l%s'
+ v.STLIBPATH_ST = '-L%s'
+
+ v.cprogram_PATTERN = '%s'
+ v.cshlib_PATTERN = 'lib%s.so'
+ v.cstlib_PATTERN = 'lib%s.a'
+
+def configure(conf):
+ conf.find_generic_cc()
+ conf.find_ar()
+ conf.generic_cc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/buildtools/wafsamba/pkgconfig.py b/buildtools/wafsamba/pkgconfig.py
new file mode 100644
index 0000000..417bf03
--- /dev/null
+++ b/buildtools/wafsamba/pkgconfig.py
@@ -0,0 +1,68 @@
+# handle substitution of variables in pc files
+
+import os, re, sys
+from waflib import Build, Logs
+from samba_utils import SUBST_VARS_RECURSIVE, TO_LIST
+
+def subst_at_vars(task):
+ '''substitute @VAR@ style variables in a file'''
+
+ s = task.inputs[0].read()
+ # split on the vars
+ a = re.split(r'(@\w+@)', s)
+ out = []
+ done_var = {}
+ back_sub = [ ('PREFIX', '${prefix}'), ('EXEC_PREFIX', '${exec_prefix}')]
+ for v in a:
+ if re.match(r'@\w+@', v):
+ vname = v[1:-1]
+ if not vname in task.env and vname.upper() in task.env:
+ vname = vname.upper()
+ if not vname in task.env:
+ Logs.error("Unknown substitution %s in %s" % (v, task.name))
+ sys.exit(1)
+ v = SUBST_VARS_RECURSIVE(task.env[vname], task.env)
+ # now we back substitute the allowed pc vars
+ for (b, m) in back_sub:
+ s = task.env[b]
+ if s == v[0:len(s)]:
+ if not b in done_var:
+ # we don't want to substitute the first usage
+ done_var[b] = True
+ else:
+ v = m + v[len(s):]
+ break
+ out.append(v)
+ contents = ''.join(out)
+ task.outputs[0].write(contents)
+ return 0
+
+
+def PKG_CONFIG_FILES(bld, pc_files, vnum=None, extra_name=None):
+ '''install some pkg_config pc files'''
+ dest = '${PKGCONFIGDIR}'
+ dest = bld.EXPAND_VARIABLES(dest)
+ for f in TO_LIST(pc_files):
+ if extra_name:
+ target = f.split('.pc')[0] + extra_name + ".pc"
+ else:
+ target = f
+ base=os.path.basename(target)
+ t = bld.SAMBA_GENERATOR('PKGCONFIG_%s' % base,
+ rule=subst_at_vars,
+ source=f+'.in',
+ target=target)
+ bld.add_manual_dependency(bld.path.find_or_declare(f), bld.env['PREFIX'].encode('utf8'))
+ t.vars = []
+ if t.env.RPATH_ON_INSTALL:
+ t.env.LIB_RPATH = t.env.RPATH_ST % t.env.LIBDIR
+ else:
+ t.env.LIB_RPATH = ''
+ if vnum:
+ t.env.PACKAGE_VERSION = vnum
+ for v in [ 'PREFIX', 'EXEC_PREFIX', 'LIB_RPATH' ]:
+ t.vars.append(t.env[v])
+ bld.INSTALL_FILES(dest, target, flat=True, destname=base)
+Build.BuildContext.PKG_CONFIG_FILES = PKG_CONFIG_FILES
+
+
diff --git a/buildtools/wafsamba/samba3.py b/buildtools/wafsamba/samba3.py
new file mode 100644
index 0000000..227ee27
--- /dev/null
+++ b/buildtools/wafsamba/samba3.py
@@ -0,0 +1,111 @@
+# a waf tool to add autoconf-like macros to the configure section
+# and for SAMBA_ macros for building libraries, binaries etc
+
+import os
+from waflib import Build
+from samba_utils import TO_LIST
+from samba_autoconf import library_flags
+
+def SAMBA3_IS_STATIC_MODULE(bld, module):
+ '''Check whether module is in static list'''
+ if module in bld.env['static_modules']:
+ return True
+ return False
+Build.BuildContext.SAMBA3_IS_STATIC_MODULE = SAMBA3_IS_STATIC_MODULE
+
+def SAMBA3_IS_SHARED_MODULE(bld, module):
+ '''Check whether module is in shared list'''
+ if module in bld.env['shared_modules']:
+ return True
+ return False
+Build.BuildContext.SAMBA3_IS_SHARED_MODULE = SAMBA3_IS_SHARED_MODULE
+
+def SAMBA3_IS_ENABLED_MODULE(bld, module):
+ '''Check whether module is in either shared or static list '''
+ return SAMBA3_IS_STATIC_MODULE(bld, module) or SAMBA3_IS_SHARED_MODULE(bld, module)
+Build.BuildContext.SAMBA3_IS_ENABLED_MODULE = SAMBA3_IS_ENABLED_MODULE
+
+
+
+def s3_fix_kwargs(bld, kwargs):
+ '''fix the build arguments for s3 build rules to include the
+ necessary includes, subdir and cflags options '''
+ s3dir = os.path.join(bld.env.srcdir, 'source3')
+ s3reldir = os.path.relpath(s3dir, bld.path.abspath())
+
+ # the extra_includes list is relative to the source3 directory
+ extra_includes = [ '.', 'include', 'lib' ]
+ # local heimdal paths must only be included when using our embedded Heimdal
+ if bld.CONFIG_SET("USING_EMBEDDED_HEIMDAL"):
+ extra_includes += [ '../third_party/heimdal/lib/com_err',
+ '../third_party/heimdal/lib/base',
+ '../third_party/heimdal/lib/krb5',
+ '../third_party/heimdal/lib/gssapi/gssapi',
+ '../third_party/heimdal_build/include',
+ '../bin/default/third_party/heimdal/lib/asn1' ]
+
+ if bld.CONFIG_SET('USING_SYSTEM_TDB'):
+ (tdb_includes, tdb_ldflags, tdb_cpppath) = library_flags(bld, 'tdb')
+ extra_includes += tdb_cpppath
+ else:
+ extra_includes += [ '../lib/tdb/include' ]
+
+ if bld.CONFIG_SET('USING_SYSTEM_TEVENT'):
+ (tevent_includes, tevent_ldflags, tevent_cpppath) = library_flags(bld, 'tevent')
+ extra_includes += tevent_cpppath
+ else:
+ extra_includes += [ '../lib/tevent' ]
+
+ if bld.CONFIG_SET('USING_SYSTEM_TALLOC'):
+ (talloc_includes, talloc_ldflags, talloc_cpppath) = library_flags(bld, 'talloc')
+ extra_includes += talloc_cpppath
+ else:
+ extra_includes += [ '../lib/talloc' ]
+
+ if bld.CONFIG_SET('USING_SYSTEM_POPT'):
+ (popt_includes, popt_ldflags, popt_cpppath) = library_flags(bld, 'popt')
+ extra_includes += popt_cpppath
+ else:
+ extra_includes += [ '../lib/popt' ]
+
+ # s3 builds assume that they will have a bunch of extra include paths
+ includes = []
+ for d in extra_includes:
+ includes += [ os.path.join(s3reldir, d) ]
+
+ # the rule may already have some includes listed
+ if 'includes' in kwargs:
+ includes += TO_LIST(kwargs['includes'])
+ kwargs['includes'] = includes
+
+# these wrappers allow for mixing of S3 and S4 build rules in the one build
+
+def SAMBA3_LIBRARY(bld, name, *args, **kwargs):
+ s3_fix_kwargs(bld, kwargs)
+ return bld.SAMBA_LIBRARY(name, *args, **kwargs)
+Build.BuildContext.SAMBA3_LIBRARY = SAMBA3_LIBRARY
+
+def SAMBA3_PLUGIN(bld, name, *args, **kwargs):
+ s3_fix_kwargs(bld, kwargs)
+ return bld.SAMBA_PLUGIN(name, *args, **kwargs)
+Build.BuildContext.SAMBA3_PLUGIN = SAMBA3_PLUGIN
+
+def SAMBA3_MODULE(bld, name, *args, **kwargs):
+ s3_fix_kwargs(bld, kwargs)
+ return bld.SAMBA_MODULE(name, *args, **kwargs)
+Build.BuildContext.SAMBA3_MODULE = SAMBA3_MODULE
+
+def SAMBA3_SUBSYSTEM(bld, name, *args, **kwargs):
+ s3_fix_kwargs(bld, kwargs)
+ return bld.SAMBA_SUBSYSTEM(name, *args, **kwargs)
+Build.BuildContext.SAMBA3_SUBSYSTEM = SAMBA3_SUBSYSTEM
+
+def SAMBA3_BINARY(bld, name, *args, **kwargs):
+ s3_fix_kwargs(bld, kwargs)
+ return bld.SAMBA_BINARY(name, *args, **kwargs)
+Build.BuildContext.SAMBA3_BINARY = SAMBA3_BINARY
+
+def SAMBA3_PYTHON(bld, name, *args, **kwargs):
+ s3_fix_kwargs(bld, kwargs)
+ return bld.SAMBA_PYTHON(name, *args, **kwargs)
+Build.BuildContext.SAMBA3_PYTHON = SAMBA3_PYTHON
diff --git a/buildtools/wafsamba/samba_abi.py b/buildtools/wafsamba/samba_abi.py
new file mode 100644
index 0000000..2d9505d
--- /dev/null
+++ b/buildtools/wafsamba/samba_abi.py
@@ -0,0 +1,334 @@
+# functions for handling ABI checking of libraries
+
+import os
+import sys
+import re
+import fnmatch
+
+from waflib import Options, Utils, Logs, Task, Build, Errors
+from waflib.TaskGen import feature, before, after
+from wafsamba import samba_utils
+
+# these type maps cope with platform specific names for common types
+# please add new type mappings into the list below
+abi_type_maps = {
+ '_Bool' : 'bool',
+ 'struct __va_list_tag *' : 'va_list'
+ }
+
+version_key = lambda x: list(map(int, x.split(".")))
+
+def normalise_signature(sig):
+ '''normalise a signature from gdb'''
+ sig = sig.strip()
+ sig = re.sub(r'^\$[0-9]+\s=\s\{(.+)\}$', r'\1', sig)
+ sig = re.sub(r'^\$[0-9]+\s=\s\{(.+)\}(\s0x[0-9a-f]+\s<\w+>)+$', r'\1', sig)
+ sig = re.sub(r'^\$[0-9]+\s=\s(0x[0-9a-f]+)\s?(<\w+>)?$', r'\1', sig)
+ sig = re.sub(r'0x[0-9a-f]+', '0xXXXX', sig)
+ sig = re.sub('", <incomplete sequence (\\\\[a-z0-9]+)>', r'\1"', sig)
+
+ for t in abi_type_maps:
+ # we need to cope with non-word characters in mapped types
+ m = t
+ m = m.replace('*', r'\*')
+ if m[-1].isalnum() or m[-1] == '_':
+ m += '\\b'
+ if m[0].isalnum() or m[0] == '_':
+ m = '\\b' + m
+ sig = re.sub(m, abi_type_maps[t], sig)
+ return sig
+
+
+def normalise_varargs(sig):
+ '''cope with older versions of gdb'''
+ sig = re.sub(r',\s\.\.\.', '', sig)
+ # Make sure we compare bytes and not strings
+ return bytes(sig, encoding='utf-8').decode('unicode_escape')
+
+
+def parse_sigs(sigs, abi_match):
+ '''parse ABI signatures file'''
+ abi_match = samba_utils.TO_LIST(abi_match)
+ ret = {}
+ a = sigs.split('\n')
+ for s in a:
+ if s.find(':') == -1:
+ continue
+ sa = s.split(':')
+ if abi_match:
+ matched = False
+ negative = False
+ for p in abi_match:
+ if p[0] == '!' and fnmatch.fnmatch(sa[0], p[1:]):
+ negative = True
+ break
+ elif fnmatch.fnmatch(sa[0], p):
+ matched = True
+ break
+ if (not matched) and negative:
+ continue
+ Logs.debug("%s -> %s" % (sa[1], normalise_signature(sa[1])))
+ ret[sa[0]] = normalise_signature(sa[1])
+ return ret
+
+def save_sigs(sig_file, parsed_sigs):
+ '''save ABI signatures to a file'''
+ sigs = "".join('%s: %s\n' % (s, parsed_sigs[s]) for s in sorted(parsed_sigs.keys()))
+ return samba_utils.save_file(sig_file, sigs, create_dir=True)
+
+
+def abi_check_task(self):
+ '''check if the ABI has changed'''
+ abi_gen = self.ABI_GEN
+
+ libpath = self.inputs[0].abspath(self.env)
+ libname = os.path.basename(libpath)
+
+ sigs = samba_utils.get_string(Utils.cmd_output([abi_gen, libpath]))
+ parsed_sigs = parse_sigs(sigs, self.ABI_MATCH)
+
+ sig_file = self.ABI_FILE
+
+ old_sigs = samba_utils.load_file(sig_file)
+ if old_sigs is None or Options.options.ABI_UPDATE:
+ if not save_sigs(sig_file, parsed_sigs):
+ raise Errors.WafError('Failed to save ABI file "%s"' % sig_file)
+ Logs.warn('Generated ABI signatures %s' % sig_file)
+ return
+
+ parsed_old_sigs = parse_sigs(old_sigs, self.ABI_MATCH)
+
+ # check all old sigs
+ got_error = False
+ for s in parsed_old_sigs:
+ if not s in parsed_sigs:
+ Logs.error('%s: symbol %s has been removed - please update major version\n\tsignature: %s' % (
+ libname, s, parsed_old_sigs[s]))
+ got_error = True
+ elif normalise_varargs(parsed_old_sigs[s]) != normalise_varargs(parsed_sigs[s]):
+ Logs.error('%s: symbol %s has changed - please update major version\n\told_signature: %s\n\tnew_signature: %s' % (
+ libname, s, parsed_old_sigs[s], parsed_sigs[s]))
+ got_error = True
+
+ for s in parsed_sigs:
+ if not s in parsed_old_sigs:
+ Logs.error('%s: symbol %s has been added - please mark it _PRIVATE_ or update minor version\n\tsignature: %s' % (
+ libname, s, parsed_sigs[s]))
+ got_error = True
+
+ if got_error:
+ raise Errors.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname)
+
+
+t = Task.task_factory('abi_check', abi_check_task, color='BLUE', ext_in='.bin')
+t.quiet = True
+# allow "waf --abi-check" to force re-checking the ABI
+if '--abi-check' in sys.argv:
+ t.always_run = True
+
+@after('apply_link')
+@feature('abi_check')
+def abi_check(self):
+ '''check that ABI matches saved signatures'''
+ env = self.bld.env
+ if not env.ABI_CHECK or self.abi_directory is None:
+ return
+
+ # if the platform doesn't support -fvisibility=hidden then the ABI
+ # checks become fairly meaningless
+ if not env.HAVE_VISIBILITY_ATTR:
+ return
+
+ topsrc = self.bld.srcnode.abspath()
+ abi_gen = os.path.join(topsrc, 'buildtools/scripts/abi_gen.sh')
+
+ abi_file = "%s/%s-%s.sigs" % (self.abi_directory, self.version_libname,
+ self.abi_vnum)
+
+ tsk = self.create_task('abi_check', self.link_task.outputs[0])
+ tsk.ABI_FILE = abi_file
+ tsk.ABI_MATCH = self.abi_match
+ tsk.ABI_GEN = abi_gen
+
+
+def abi_process_file(fname, version, symmap):
+ '''process one ABI file, adding new symbols to the symmap'''
+ for line in Utils.readf(fname).splitlines():
+ symname = line.split(":")[0]
+ if not symname in symmap:
+ symmap[symname] = version
+
+def version_script_map_process_file(fname, version, abi_match):
+ '''process one standard version_script file, adding the symbols to the
+ abi_match'''
+ in_section = False
+ in_global = False
+ in_local = False
+ for _line in Utils.readf(fname).splitlines():
+ line = _line.strip()
+ if line == "":
+ continue
+ if line.startswith("#"):
+ continue
+ if line.endswith(" {"):
+ in_section = True
+ continue
+ if line == "};":
+ assert in_section
+ in_section = False
+ in_global = False
+ in_local = False
+ continue
+ if not in_section:
+ continue
+ if line == "global:":
+ in_global = True
+ in_local = False
+ continue
+ if line == "local:":
+ in_global = False
+ in_local = True
+ continue
+
+ symname = line.split(";")[0]
+ assert symname != ""
+ if in_local:
+ if symname == "*":
+ continue
+ symname = "!%s" % symname
+ if not symname in abi_match:
+ abi_match.append(symname)
+
+def abi_write_vscript(f, libname, current_version, versions, symmap, abi_match):
+ """Write a vscript file for a library in --version-script format.
+
+ :param f: File-like object to write to
+ :param libname: Name of the library, uppercased
+ :param current_version: Current version
+ :param versions: Versions to consider
+ :param symmap: Dictionary mapping symbols -> version
+ :param abi_match: List of symbols considered to be public in the current
+ version
+ """
+
+ invmap = {}
+ for s in symmap:
+ invmap.setdefault(symmap[s], []).append(s)
+
+ last_key = ""
+ versions = sorted(versions, key=version_key)
+ for k in versions:
+ symver = "%s_%s" % (libname, k)
+ if symver == current_version:
+ break
+ f.write("%s {\n" % symver)
+ if k in sorted(invmap.keys()):
+ f.write("\tglobal:\n")
+ for s in invmap.get(k, []):
+ f.write("\t\t%s;\n" % s)
+ f.write("}%s;\n\n" % last_key)
+ last_key = " %s" % symver
+ f.write("%s {\n" % current_version)
+ local_abi = list(filter(lambda x: x[0] == '!', abi_match))
+ global_abi = list(filter(lambda x: x[0] != '!', abi_match))
+ f.write("\tglobal:\n")
+ if len(global_abi) > 0:
+ for x in global_abi:
+ f.write("\t\t%s;\n" % x)
+ else:
+ f.write("\t\t*;\n")
+ # Always hide symbols that must be local if exist
+ local_abi.extend(["!_end", "!__bss_start", "!_edata"])
+ f.write("\tlocal:\n")
+ for x in local_abi:
+ f.write("\t\t%s;\n" % x[1:])
+ if global_abi != ["*"]:
+ if len(global_abi) > 0:
+ f.write("\t\t*;\n")
+ f.write("};\n")
+
+
+def abi_build_vscript(task):
+ '''generate a vscript file for our public libraries'''
+
+ tgt = task.outputs[0].bldpath(task.env)
+
+ symmap = {}
+ versions = []
+ abi_match = list(task.env.ABI_MATCH)
+ for f in task.inputs:
+ fname = f.abspath(task.env)
+ basename = os.path.basename(fname)
+ if basename.endswith(".sigs"):
+ version = basename[len(task.env.LIBNAME)+1:-len(".sigs")]
+ versions.append(version)
+ abi_process_file(fname, version, symmap)
+ continue
+ if basename == "version-script.map":
+ version_script_map_process_file(fname, task.env.VERSION, abi_match)
+ continue
+ raise Errors.WafError('Unsupported input "%s"' % fname)
+ if task.env.PRIVATE_LIBRARY:
+ # For private libraries we need to inject
+ # each public symbol explicitly into the
+ # abi match array and remove all explicit
+ # versioning so that each exported symbol
+ # is tagged with the private library tag.
+ for s in symmap:
+ abi_match.append(s)
+ symmap = {}
+ versions = []
+ f = open(tgt, mode='w')
+ try:
+ abi_write_vscript(f, task.env.LIBNAME, task.env.VERSION, versions,
+ symmap, abi_match)
+ finally:
+ f.close()
+
+def VSCRIPT_MAP_PRIVATE(bld, libname, orig_vscript, version, private_vscript):
+ version = version.replace("-", "_").replace("+","_").upper()
+ t = bld.SAMBA_GENERATOR(private_vscript,
+ rule=abi_build_vscript,
+ source=orig_vscript,
+ group='vscripts',
+ target=private_vscript)
+ t.env.ABI_MATCH = []
+ t.env.VERSION = version
+ t.env.LIBNAME = libname
+ t.env.PRIVATE_LIBRARY = True
+ t.vars = ['LIBNAME', 'VERSION', 'ABI_MATCH', 'PRIVATE_LIBRARY']
+Build.BuildContext.VSCRIPT_MAP_PRIVATE = VSCRIPT_MAP_PRIVATE
+
+def ABI_VSCRIPT(bld, libname, abi_directory, version, vscript, abi_match=None, private_library=False):
+ '''generate a vscript file for our public libraries'''
+ if abi_directory:
+ source = bld.path.ant_glob('%s/%s-[0-9]*.sigs' % (abi_directory, libname), flat=True)
+ def abi_file_key(path):
+ return version_key(path[:-len(".sigs")].rsplit("-")[-1])
+ source = sorted(source.split(), key=abi_file_key)
+ else:
+ source = ''
+
+ if private_library is None:
+ private_library = False
+
+ libname = os.path.basename(libname)
+ version = os.path.basename(version)
+ libname = libname.replace("-", "_").replace("+","_").upper()
+ version = version.replace("-", "_").replace("+","_").upper()
+
+ t = bld.SAMBA_GENERATOR(vscript,
+ rule=abi_build_vscript,
+ source=source,
+ group='vscripts',
+ target=vscript)
+ if abi_match is None:
+ abi_match = ["*"]
+ else:
+ abi_match = samba_utils.TO_LIST(abi_match)
+ t.env.ABI_MATCH = abi_match
+ t.env.VERSION = version
+ t.env.LIBNAME = libname
+ t.env.PRIVATE_LIBRARY = private_library
+ t.vars = ['LIBNAME', 'VERSION', 'ABI_MATCH', 'PRIVATE_LIBRARY']
+Build.BuildContext.ABI_VSCRIPT = ABI_VSCRIPT
diff --git a/buildtools/wafsamba/samba_autoconf.py b/buildtools/wafsamba/samba_autoconf.py
new file mode 100644
index 0000000..7b383ea
--- /dev/null
+++ b/buildtools/wafsamba/samba_autoconf.py
@@ -0,0 +1,1023 @@
+# a waf tool to add autoconf-like macros to the configure section
+
+import os, sys
+from waflib import Build, Options, Logs, Context
+from waflib.Configure import conf
+from waflib.TaskGen import feature
+from waflib.Tools import c_preproc as preproc
+from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p
+
+missing_headers = set()
+
+####################################################
+# some autoconf like helpers, to make the transition
+# to waf a bit easier for those used to autoconf
+# m4 files
+
+@conf
+def DEFINE(conf, d, v, add_to_cflags=False, quote=False):
+ '''define a config option'''
+ conf.define(d, v, quote=quote)
+ if add_to_cflags:
+ conf.env.append_value('CFLAGS', '-D%s=%s' % (d, str(v)))
+
+def hlist_to_string(conf, headers=None):
+ '''convert a headers list to a set of #include lines'''
+ hlist = conf.env.hlist
+ if headers:
+ hlist = hlist[:]
+ hlist.extend(TO_LIST(headers))
+ hdrs = "\n".join('#include <%s>' % h for h in hlist)
+
+ return hdrs
+
+
+@conf
+def COMPOUND_START(conf, msg):
+ '''start a compound test'''
+ def null_check_message_1(self,*k,**kw):
+ return
+ def null_check_message_2(self,*k,**kw):
+ return
+
+ v = getattr(conf.env, 'in_compound', [])
+ if v != [] and v != 0:
+ conf.env.in_compound = v + 1
+ return
+ conf.start_msg(msg)
+ conf.saved_check_message_1 = conf.start_msg
+ conf.start_msg = null_check_message_1
+ conf.saved_check_message_2 = conf.end_msg
+ conf.end_msg = null_check_message_2
+ conf.env.in_compound = 1
+
+
+@conf
+def COMPOUND_END(conf, result):
+ '''start a compound test'''
+ conf.env.in_compound -= 1
+ if conf.env.in_compound != 0:
+ return
+ conf.start_msg = conf.saved_check_message_1
+ conf.end_msg = conf.saved_check_message_2
+ p = conf.end_msg
+ if result is True:
+ p('ok')
+ elif not result:
+ p('not found', 'YELLOW')
+ else:
+ p(result)
+
+
+@feature('nolink')
+def nolink(self):
+ '''using the nolink type in conf.check() allows us to avoid
+ the link stage of a test, thus speeding it up for tests
+ that where linking is not needed'''
+ pass
+
+
+def CHECK_HEADER(conf, h, add_headers=False, lib=None):
+ '''check for a header'''
+ if h in missing_headers and lib is None:
+ return False
+ d = h.upper().replace('/', '_')
+ d = d.replace('.', '_')
+ d = d.replace('-', '_')
+ d = 'HAVE_%s' % d
+ if CONFIG_SET(conf, d):
+ if add_headers:
+ if not h in conf.env.hlist:
+ conf.env.hlist.append(h)
+ return True
+
+ (ccflags, ldflags, cpppath) = library_flags(conf, lib)
+
+ hdrs = hlist_to_string(conf, headers=h)
+ if lib is None:
+ lib = ""
+ ret = conf.check(fragment='%s\nint main(void) { return 0; }\n' % hdrs,
+ type='nolink',
+ execute=0,
+ cflags=ccflags,
+ mandatory=False,
+ includes=cpppath,
+ uselib=lib.upper(),
+ msg="Checking for header %s" % h)
+ if not ret:
+ missing_headers.add(h)
+ return False
+
+ conf.DEFINE(d, 1)
+ if add_headers and not h in conf.env.hlist:
+ conf.env.hlist.append(h)
+ return ret
+
+
+@conf
+def CHECK_HEADERS(conf, headers, add_headers=False, together=False, lib=None):
+ '''check for a list of headers
+
+ when together==True, then the headers accumulate within this test.
+ This is useful for interdependent headers
+ '''
+ ret = True
+ if not add_headers and together:
+ saved_hlist = conf.env.hlist[:]
+ set_add_headers = True
+ else:
+ set_add_headers = add_headers
+ for hdr in TO_LIST(headers):
+ if not CHECK_HEADER(conf, hdr, set_add_headers, lib=lib):
+ ret = False
+ if not add_headers and together:
+ conf.env.hlist = saved_hlist
+ return ret
+
+
+def header_list(conf, headers=None, lib=None):
+ '''form a list of headers which exist, as a string'''
+ hlist=[]
+ if headers is not None:
+ for h in TO_LIST(headers):
+ if CHECK_HEADER(conf, h, add_headers=False, lib=lib):
+ hlist.append(h)
+ return hlist_to_string(conf, headers=hlist)
+
+
+@conf
+def CHECK_TYPE(conf, t, alternate=None, headers=None, define=None, lib=None, msg=None, cflags=''):
+ '''check for a single type'''
+ if define is None:
+ define = 'HAVE_' + t.upper().replace(' ', '_')
+ if msg is None:
+ msg='Checking for %s' % t
+ ret = CHECK_CODE(conf, '%s _x' % t,
+ define,
+ execute=False,
+ headers=headers,
+ local_include=False,
+ msg=msg,
+ cflags=cflags,
+ lib=lib,
+ link=False)
+ if not ret and alternate:
+ conf.DEFINE(t, alternate)
+ return ret
+
+
+@conf
+def CHECK_TYPES(conf, list, headers=None, define=None, alternate=None, lib=None):
+ '''check for a list of types'''
+ ret = True
+ for t in TO_LIST(list):
+ if not CHECK_TYPE(conf, t, headers=headers,
+ define=define, alternate=alternate, lib=lib):
+ ret = False
+ return ret
+
+
+@conf
+def CHECK_TYPE_IN(conf, t, headers=None, alternate=None, define=None, cflags=''):
+ '''check for a single type with a header'''
+ return CHECK_TYPE(conf, t, headers=headers, alternate=alternate, define=define, cflags=cflags)
+
+
+@conf
+def CHECK_VARIABLE(conf, v, define=None, always=False,
+ headers=None, msg=None, lib=None,
+ mandatory=False):
+ '''check for a variable declaration (or define)'''
+ if define is None:
+ define = 'HAVE_%s' % v.upper()
+
+ if msg is None:
+ msg="Checking for variable %s" % v
+
+ return CHECK_CODE(conf,
+ # we need to make sure the compiler doesn't
+ # optimize it out...
+ '''
+ #ifndef %s
+ void *_x; _x=(void *)&%s; return (int)_x;
+ #endif
+ return 0
+ ''' % (v, v),
+ execute=False,
+ link=False,
+ msg=msg,
+ local_include=False,
+ lib=lib,
+ headers=headers,
+ define=define,
+ mandatory=mandatory,
+ always=always)
+
+
+@conf
+def CHECK_DECLS(conf, vars, reverse=False, headers=None, lib=None, always=False):
+ '''check a list of variable declarations, using the HAVE_DECL_xxx form
+ of define
+
+ When reverse==True then use HAVE_xxx_DECL instead of HAVE_DECL_xxx
+ '''
+ ret = True
+ for v in TO_LIST(vars):
+ if not reverse:
+ define='HAVE_DECL_%s' % v.upper()
+ else:
+ define='HAVE_%s_DECL' % v.upper()
+ if not CHECK_VARIABLE(conf, v,
+ define=define,
+ headers=headers,
+ lib=lib,
+ msg='Checking for declaration of %s' % v,
+ always=always):
+ if not CHECK_CODE(conf,
+ '''
+ return (int)%s;
+ ''' % (v),
+ execute=False,
+ link=False,
+ msg='Checking for declaration of %s (as enum)' % v,
+ local_include=False,
+ headers=headers,
+ lib=lib,
+ define=define,
+ always=always):
+ ret = False
+ return ret
+
+
+def CHECK_FUNC(conf, f, link=True, lib=None, headers=None):
+ '''check for a function'''
+ define='HAVE_%s' % f.upper()
+
+ ret = False
+
+ in_lib_str = ""
+ if lib:
+ in_lib_str = " in %s" % lib
+ conf.COMPOUND_START('Checking for %s%s' % (f, in_lib_str))
+
+ if link is None or link:
+ ret = CHECK_CODE(conf,
+ # this is based on the autoconf strategy
+ '''
+ #define %s __fake__%s
+ #ifdef HAVE_LIMITS_H
+ # include <limits.h>
+ #else
+ # include <assert.h>
+ #endif
+ #undef %s
+ #if defined __stub_%s || defined __stub___%s
+ #error "bad glibc stub"
+ #endif
+ extern char %s();
+ int main() { return %s(); }
+ ''' % (f, f, f, f, f, f, f),
+ execute=False,
+ link=True,
+ addmain=False,
+ add_headers=False,
+ define=define,
+ local_include=False,
+ lib=lib,
+ headers=headers,
+ msg='Checking for %s' % f)
+
+ if not ret:
+ ret = CHECK_CODE(conf,
+ # it might be a macro
+ # we need to make sure the compiler doesn't
+ # optimize it out...
+ 'void *__x = (void *)%s; return (int)__x' % f,
+ execute=False,
+ link=True,
+ addmain=True,
+ add_headers=True,
+ define=define,
+ local_include=False,
+ lib=lib,
+ headers=headers,
+ msg='Checking for macro %s' % f)
+
+ if not ret and (link is None or not link):
+ ret = CHECK_VARIABLE(conf, f,
+ define=define,
+ headers=headers,
+ msg='Checking for declaration of %s' % f)
+ conf.COMPOUND_END(ret)
+ return ret
+
+
+@conf
+def CHECK_FUNCS(conf, list, link=True, lib=None, headers=None):
+ '''check for a list of functions'''
+ ret = True
+ for f in TO_LIST(list):
+ if not CHECK_FUNC(conf, f, link=link, lib=lib, headers=headers):
+ ret = False
+ return ret
+
+
+@conf
+def CHECK_SIZEOF(conf, vars, headers=None, define=None, critical=True):
+ '''check the size of a type'''
+ for v in TO_LIST(vars):
+ v_define = define
+ ret = False
+ if v_define is None:
+ v_define = 'SIZEOF_%s' % v.upper().replace(' ', '_')
+ for size in list((1, 2, 4, 8, 16, 32, 64)):
+ if CHECK_CODE(conf,
+ 'static int test_array[1 - 2 * !(((long int)(sizeof(%s))) <= %d)];' % (v, size),
+ define=v_define,
+ quote=False,
+ headers=headers,
+ local_include=False,
+ msg="Checking if size of %s == %d" % (v, size)):
+ conf.DEFINE(v_define, size)
+ ret = True
+ break
+ if not ret and critical:
+ Logs.error("Couldn't determine size of '%s'" % v)
+ sys.exit(1)
+ return ret
+
+@conf
+def CHECK_SIGN(conf, v, headers=None):
+ '''check the sign of a type'''
+ define_name = v.upper().replace(' ', '_')
+ for op, signed in [('<', 'signed'),
+ ('>', 'unsigned')]:
+ if CHECK_CODE(conf,
+ f'static int test_array[1 - 2 * !((({v})-1) {op} 0)];',
+ define=f'{define_name}_{signed.upper()}',
+ quote=False,
+ headers=headers,
+ local_include=False,
+ msg=f"Checking if '{v}' is {signed}"):
+ return True
+
+ return False
+
+@conf
+def CHECK_VALUEOF(conf, v, headers=None, define=None, lib=None):
+ '''check the value of a variable/define'''
+ ret = True
+ v_define = define
+ if v_define is None:
+ v_define = 'VALUEOF_%s' % v.upper().replace(' ', '_')
+ if CHECK_CODE(conf,
+ 'printf("%%u", (unsigned)(%s))' % v,
+ define=v_define,
+ execute=True,
+ define_ret=True,
+ quote=False,
+ lib=lib,
+ headers=headers,
+ local_include=False,
+ msg="Checking value of %s" % v):
+ return int(conf.env[v_define])
+
+ return None
+
+@conf
+def CHECK_CODE(conf, code, define,
+ always=False, execute=False, addmain=True,
+ add_headers=True, mandatory=False,
+ headers=None, msg=None, cflags='', includes='# .',
+ local_include=True, lib=None, link=True,
+ define_ret=False, quote=False,
+ on_target=True, strict=False):
+ '''check if some code compiles and/or runs'''
+
+ if CONFIG_SET(conf, define):
+ return True
+
+ if headers is not None:
+ CHECK_HEADERS(conf, headers=headers, lib=lib)
+
+ if add_headers:
+ hdrs = header_list(conf, headers=headers, lib=lib)
+ else:
+ hdrs = ''
+ if execute:
+ execute = 1
+ else:
+ execute = 0
+
+ if addmain:
+ fragment='%s\n int main(void) { %s; return 0; }\n' % (hdrs, code)
+ else:
+ fragment='%s\n%s\n' % (hdrs, code)
+
+ if msg is None:
+ msg="Checking for %s" % define
+
+ cflags = TO_LIST(cflags)
+
+ # Be strict when relying on a compiler check
+ # Some compilers (e.g. xlc) ignore non-supported features as warnings
+ if strict:
+ if 'WERROR_CFLAGS' in conf.env:
+ cflags.extend(conf.env['WERROR_CFLAGS'])
+
+ if local_include:
+ cflags.append('-I%s' % conf.path.abspath())
+
+ if not link:
+ type='nolink'
+ else:
+ type='cprogram'
+
+ uselib = TO_LIST(lib)
+
+ (ccflags, ldflags, cpppath) = library_flags(conf, uselib)
+
+ includes = TO_LIST(includes)
+ includes.extend(cpppath)
+
+ uselib = [l.upper() for l in uselib]
+
+ cflags.extend(ccflags)
+
+ if on_target:
+ test_args = conf.SAMBA_CROSS_ARGS(msg=msg)
+ else:
+ test_args = []
+
+ conf.COMPOUND_START(msg)
+
+ try:
+ ret = conf.check(fragment=fragment,
+ execute=execute,
+ define_name = define,
+ cflags=cflags,
+ ldflags=ldflags,
+ includes=includes,
+ uselib=uselib,
+ type=type,
+ msg=msg,
+ quote=quote,
+ test_args=test_args,
+ define_ret=define_ret)
+ except Exception:
+ if always:
+ conf.DEFINE(define, 0)
+ else:
+ conf.undefine(define)
+ conf.COMPOUND_END(False)
+ if mandatory:
+ raise
+ return False
+ else:
+ # Success is indicated by ret but we should unset
+ # defines set by WAF's c_config.check() because it
+ # defines it to int(ret) and we want to undefine it
+ if not ret:
+ conf.undefine(define)
+ conf.COMPOUND_END(False)
+ return False
+ if not define_ret:
+ conf.DEFINE(define, 1)
+ conf.COMPOUND_END(True)
+ else:
+ conf.DEFINE(define, ret, quote=quote)
+ conf.COMPOUND_END(ret)
+ return True
+
+
+@conf
+def CHECK_STRUCTURE_MEMBER(conf, structname, member,
+ always=False, define=None, headers=None,
+ lib=None):
+ '''check for a structure member'''
+ if define is None:
+ define = 'HAVE_%s' % member.upper()
+ return CHECK_CODE(conf,
+ '%s s; void *_x; _x=(void *)&s.%s' % (structname, member),
+ define,
+ execute=False,
+ link=False,
+ lib=lib,
+ always=always,
+ headers=headers,
+ local_include=False,
+ msg="Checking for member %s in %s" % (member, structname))
+
+
+@conf
+def CHECK_CFLAGS(conf, cflags, fragment='int main(void) { return 0; }\n',
+ mandatory=False):
+ '''check if the given cflags are accepted by the compiler
+ '''
+ check_cflags = TO_LIST(cflags)
+ if 'WERROR_CFLAGS' in conf.env:
+ check_cflags.extend(conf.env['WERROR_CFLAGS'])
+ return conf.check(fragment=fragment,
+ execute=0,
+ mandatory=mandatory,
+ type='nolink',
+ cflags=check_cflags,
+ msg="Checking compiler accepts %s" % cflags)
+
+@conf
+def CHECK_LDFLAGS(conf, ldflags,
+ mandatory=False):
+ '''check if the given ldflags are accepted by the linker
+ '''
+ return conf.check(fragment='int main(void) { return 0; }\n',
+ execute=0,
+ ldflags=ldflags,
+ mandatory=mandatory,
+ msg="Checking linker accepts %s" % ldflags)
+
+
+@conf
+def CONFIG_GET(conf, option):
+ '''return True if a configuration option was found'''
+ if (option in conf.env):
+ return conf.env[option]
+ else:
+ return None
+
+@conf
+def CONFIG_SET(conf, option):
+ '''return True if a configuration option was found'''
+ if option not in conf.env:
+ return False
+ v = conf.env[option]
+ if v is None:
+ return False
+ if v == []:
+ return False
+ if v == ():
+ return False
+ return True
+
+@conf
+def CONFIG_RESET(conf, option):
+ if option not in conf.env:
+ return
+ del conf.env[option]
+
+Build.BuildContext.CONFIG_RESET = CONFIG_RESET
+Build.BuildContext.CONFIG_SET = CONFIG_SET
+Build.BuildContext.CONFIG_GET = CONFIG_GET
+
+
+def library_flags(self, libs):
+ '''work out flags from pkg_config'''
+ ccflags = []
+ ldflags = []
+ cpppath = []
+ for lib in TO_LIST(libs):
+ # note that we do not add the -I and -L in here, as that is added by the waf
+ # core. Adding it here would just change the order that it is put on the link line
+ # which can cause system paths to be added before internal libraries
+ extra_ccflags = TO_LIST(getattr(self.env, 'CFLAGS_%s' % lib.upper(), []))
+ extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), []))
+ extra_cpppath = TO_LIST(getattr(self.env, 'CPPPATH_%s' % lib.upper(), []))
+ ccflags.extend(extra_ccflags)
+ ldflags.extend(extra_ldflags)
+ cpppath.extend(extra_cpppath)
+
+ extra_cpppath = TO_LIST(getattr(self.env, 'INCLUDES_%s' % lib.upper(), []))
+ cpppath.extend(extra_cpppath)
+ if 'EXTRA_LDFLAGS' in self.env:
+ ldflags.extend(self.env['EXTRA_LDFLAGS'])
+
+ ccflags = unique_list(ccflags)
+ ldflags = unique_list(ldflags)
+ cpppath = unique_list(cpppath)
+ return (ccflags, ldflags, cpppath)
+
+
+@conf
+def CHECK_LIB(conf, libs, mandatory=False, empty_decl=True, set_target=True, shlib=False):
+ '''check if a set of libraries exist as system libraries
+
+ returns the sublist of libs that do exist as a syslib or []
+ '''
+
+ fragment= '''
+int foo()
+{
+ int v = 2;
+ return v*2;
+}
+'''
+ ret = []
+ liblist = TO_LIST(libs)
+ for lib in liblist[:]:
+ if GET_TARGET_TYPE(conf, lib) == 'SYSLIB':
+ ret.append(lib)
+ continue
+
+ (ccflags, ldflags, cpppath) = library_flags(conf, lib)
+ if shlib:
+ res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
+ else:
+ res = conf.check(lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
+
+ if not res:
+ if mandatory:
+ Logs.error("Mandatory library '%s' not found for functions '%s'" % (lib, libs))
+ sys.exit(1)
+ if empty_decl:
+ # if it isn't a mandatory library, then remove it from dependency lists
+ if set_target:
+ SET_TARGET_TYPE(conf, lib, 'EMPTY')
+ else:
+ conf.define('HAVE_LIB%s' % lib.upper().replace('-','_').replace('.','_'), 1)
+ conf.env['LIB_' + lib.upper()] = lib
+ if set_target:
+ conf.SET_TARGET_TYPE(lib, 'SYSLIB')
+ ret.append(lib)
+
+ return ret
+
+
+
+@conf
+def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False,
+ headers=None, link=True, empty_decl=True, set_target=True):
+ """
+ check that the functions in 'list' are available in 'library'
+ if they are, then make that library available as a dependency
+
+ if the library is not available and mandatory==True, then
+ raise an error.
+
+ If the library is not available and mandatory==False, then
+ add the library to the list of dependencies to remove from
+ build rules
+
+ optionally check for the functions first in libc
+ """
+ remaining = TO_LIST(list)
+ liblist = TO_LIST(library)
+
+ # check if some already found
+ for f in remaining[:]:
+ if CONFIG_SET(conf, 'HAVE_%s' % f.upper()):
+ remaining.remove(f)
+
+ # see if the functions are in libc
+ if checklibc:
+ for f in remaining[:]:
+ if CHECK_FUNC(conf, f, link=True, headers=headers):
+ remaining.remove(f)
+
+ if remaining == []:
+ for lib in liblist:
+ if GET_TARGET_TYPE(conf, lib) != 'SYSLIB' and empty_decl:
+ SET_TARGET_TYPE(conf, lib, 'EMPTY')
+ return True
+
+ checklist = conf.CHECK_LIB(liblist, empty_decl=empty_decl, set_target=set_target)
+ for lib in liblist[:]:
+ if not lib in checklist and mandatory:
+ Logs.error("Mandatory library '%s' not found for functions '%s'" % (lib, list))
+ sys.exit(1)
+
+ ret = True
+ for f in remaining:
+ if not CHECK_FUNC(conf, f, lib=' '.join(checklist), headers=headers, link=link):
+ ret = False
+
+ return ret
+
+
+@conf
+def IN_LAUNCH_DIR(conf):
+ '''return True if this rule is being run from the launch directory'''
+ return os.path.realpath(conf.path.abspath()) == os.path.realpath(Context.launch_dir)
+Options.OptionsContext.IN_LAUNCH_DIR = IN_LAUNCH_DIR
+
+
+@conf
+def SAMBA_CONFIG_H(conf, path=None):
+ '''write out config.h in the right directory'''
+ # we don't want to produce a config.h in places like lib/replace
+ # when we are building projects that depend on lib/replace
+ if not IN_LAUNCH_DIR(conf):
+ return
+
+ # we need to build real code that can't be optimized away to test
+ stack_protect_list = ['-fstack-protector-strong', '-fstack-protector']
+ for stack_protect_flag in stack_protect_list:
+ flag_supported = conf.check(fragment='''
+ #include <stdio.h>
+
+ int main(void)
+ {
+ char t[100000];
+ while (fgets(t, sizeof(t), stdin));
+ return 0;
+ }
+ ''',
+ execute=0,
+ cflags=[ '-Werror', '-Wp,-D_FORTIFY_SOURCE=2', stack_protect_flag],
+ mandatory=False,
+ msg='Checking if compiler accepts %s' % (stack_protect_flag))
+ if flag_supported:
+ conf.ADD_CFLAGS('%s' % (stack_protect_flag))
+ break
+
+ flag_supported = conf.check(fragment='''
+ #include <stdio.h>
+
+ int main(void)
+ {
+ char t[100000];
+ while (fgets(t, sizeof(t), stdin));
+ return 0;
+ }
+ ''',
+ execute=0,
+ cflags=[ '-Werror', '-fstack-clash-protection'],
+ mandatory=False,
+ msg='Checking if compiler accepts -fstack-clash-protection')
+ if flag_supported:
+ conf.ADD_CFLAGS('-fstack-clash-protection')
+
+ if Options.options.debug:
+ conf.ADD_CFLAGS('-g', testflags=True)
+
+ if Options.options.pidl_developer:
+ conf.env.PIDL_DEVELOPER_MODE = True
+
+ if Options.options.developer:
+ conf.env.DEVELOPER_MODE = True
+
+ conf.ADD_CFLAGS('-g', testflags=True)
+ conf.ADD_CFLAGS('-Wall', testflags=True)
+ conf.ADD_CFLAGS('-Wshadow', testflags=True)
+ conf.ADD_CFLAGS('-Wmissing-prototypes', testflags=True)
+ if CHECK_CODE(conf,
+ 'struct a { int b; }; struct c { struct a d; } e = { };',
+ 'CHECK_C99_INIT',
+ link=False,
+ cflags='-Wmissing-field-initializers -Werror=missing-field-initializers',
+ msg="Checking C99 init of nested structs."):
+ conf.ADD_CFLAGS('-Wmissing-field-initializers', testflags=True)
+ conf.ADD_CFLAGS('-Wformat-overflow=2', testflags=True)
+ conf.ADD_CFLAGS('-Wformat-zero-length', testflags=True)
+ conf.ADD_CFLAGS('-Wcast-align -Wcast-qual', testflags=True)
+ conf.ADD_CFLAGS('-fno-common', testflags=True)
+
+ conf.ADD_CFLAGS('-Werror=address', testflags=True)
+ # we add these here to ensure that -Wstrict-prototypes is not set during configure
+ conf.ADD_CFLAGS('-Werror=strict-prototypes -Wstrict-prototypes',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror=write-strings -Wwrite-strings',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror-implicit-function-declaration',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror=implicit-int',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror=pointer-arith -Wpointer-arith',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror=declaration-after-statement -Wdeclaration-after-statement',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror=return-type -Wreturn-type',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror=uninitialized -Wuninitialized',
+ testflags=True)
+ conf.ADD_CFLAGS('-Wimplicit-fallthrough',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror=strict-overflow -Wstrict-overflow=2',
+ testflags=True)
+ conf.ADD_CFLAGS('-Werror=old-style-definition -Wold-style-definition',
+ testflags=True)
+
+ conf.ADD_CFLAGS('-Wformat=2 -Wno-format-y2k', testflags=True)
+ conf.ADD_CFLAGS('-Wno-format-zero-length', testflags=True)
+ conf.ADD_CFLAGS('-Werror=format-security -Wformat-security',
+ testflags=True, prereq_flags='-Wformat')
+ # This check is because for ldb_search(), a NULL format string
+ # is not an error, but some compilers complain about that.
+ if CHECK_CFLAGS(conf, ["-Werror=format", "-Wformat=2"], '''
+int testformat(char *format, ...) __attribute__ ((format (__printf__, 1, 2)));
+
+int main(void) {
+ testformat(0);
+ return 0;
+}
+
+'''):
+ if not 'EXTRA_CFLAGS' in conf.env:
+ conf.env['EXTRA_CFLAGS'] = []
+ conf.env['EXTRA_CFLAGS'].extend(TO_LIST("-Werror=format"))
+
+ if CHECK_CFLAGS(conf, ["-Wno-error=array-bounds"]):
+ conf.define('HAVE_WNO_ERROR_ARRAY_BOUNDS', 1)
+
+ if CHECK_CFLAGS(conf, ["-Wno-error=stringop-overflow"]):
+ conf.define('HAVE_WNO_ERROR_STRINGOP_OVERFLOW', 1)
+
+ if CHECK_CFLAGS(conf, ["-Wno-error=declaration-after-statement"]):
+ conf.define('HAVE_WNO_ERROR_DECLARATION_AFTER_STATEMENT', 1)
+
+ if not Options.options.disable_warnings_as_errors:
+ conf.ADD_NAMED_CFLAGS('PICKY_CFLAGS', '-Werror -Wno-error=deprecated-declarations', testflags=True)
+ conf.ADD_NAMED_CFLAGS('PICKY_CFLAGS', '-Wno-error=tautological-compare', testflags=True)
+ conf.ADD_NAMED_CFLAGS('PICKY_CFLAGS', '-Wno-error=cast-align', testflags=True)
+
+ if Options.options.fatal_errors:
+ conf.ADD_CFLAGS('-Wfatal-errors', testflags=True)
+
+ if Options.options.pedantic:
+ conf.ADD_CFLAGS('-W', testflags=True)
+
+ if (Options.options.address_sanitizer or
+ Options.options.undefined_sanitizer):
+ conf.ADD_CFLAGS('-g -O1', testflags=True)
+ if (Options.options.address_sanitizer
+ or Options.options.memory_sanitizer):
+ conf.ADD_CFLAGS('-fno-omit-frame-pointer', testflags=True)
+ if Options.options.address_sanitizer:
+ conf.ADD_CFLAGS('-fsanitize=address', testflags=True)
+ conf.ADD_LDFLAGS('-fsanitize=address', testflags=True)
+ conf.env['ADDRESS_SANITIZER'] = True
+ if Options.options.undefined_sanitizer:
+ conf.ADD_CFLAGS('-fsanitize=undefined', testflags=True)
+ conf.ADD_CFLAGS('-fsanitize=null', testflags=True)
+ conf.ADD_CFLAGS('-fsanitize=alignment', testflags=True)
+ conf.ADD_LDFLAGS('-fsanitize=undefined', testflags=True)
+ conf.env['UNDEFINED_SANITIZER'] = True
+
+ # MemorySanitizer is only available if you build with clang
+ if Options.options.memory_sanitizer:
+ conf.ADD_CFLAGS('-g -O2', testflags=True)
+ conf.ADD_CFLAGS('-fsanitize=memory', testflags=True)
+ conf.ADD_CFLAGS('-fsanitize-memory-track-origins=2', testflags=True)
+ conf.ADD_LDFLAGS('-fsanitize=memory')
+ conf.env['MEMORY_SANITIZER'] = True
+
+ # Let people pass an additional ADDITIONAL_{CFLAGS,LDFLAGS}
+ # environment variables which are only used the for final build.
+ #
+ # The CFLAGS and LDFLAGS environment variables are also
+ # used for the configure checks which might impact their results.
+ #
+ # If these variables don't pass a smoke test, fail the configure
+
+ conf.add_os_flags('ADDITIONAL_CFLAGS')
+ if conf.env.ADDITIONAL_CFLAGS:
+ conf.CHECK_CFLAGS(conf.env['ADDITIONAL_CFLAGS'],
+ mandatory=True)
+ conf.env['EXTRA_CFLAGS'].extend(conf.env['ADDITIONAL_CFLAGS'])
+
+ conf.add_os_flags('ADDITIONAL_LDFLAGS')
+ if conf.env.ADDITIONAL_LDFLAGS:
+ conf.CHECK_LDFLAGS(conf.env['ADDITIONAL_LDFLAGS'],
+ mandatory=True)
+ conf.env['EXTRA_LDFLAGS'].extend(conf.env['ADDITIONAL_LDFLAGS'])
+
+ if path is None:
+ conf.write_config_header('default/config.h', top=True, remove=False)
+ else:
+ conf.write_config_header(os.path.join(conf.variant, path), remove=False)
+ for key in conf.env.define_key:
+ conf.undefine(key, from_env=False)
+ conf.env.define_key = []
+ conf.SAMBA_CROSS_CHECK_COMPLETE()
+
+
@conf
def CONFIG_PATH(conf, name, default):
    '''Set up a configurable install path.

    Stores *default* under conf.env[name] unless it is already set;
    a relative default is anchored under the install PREFIX.
    '''
    if name in conf.env:
        return
    if default[0] == '/':
        conf.env[name] = default
    else:
        conf.env[name] = conf.env['PREFIX'] + default
+
@conf
def ADD_NAMED_CFLAGS(conf, name, flags, testflags=False, prereq_flags=None):
    '''add some CFLAGS to the named environment list

    :param name: key in conf.env that accumulates the flags (e.g. 'PICKY_CFLAGS')
    :param flags: whitespace-separated flags to add
    :param testflags: when True, compile-test each flag individually and
        keep only those the compiler accepts
    :param prereq_flags: flags passed alongside each tested flag (some
        warning options only take effect in combination with others)
    '''
    if prereq_flags is None:
        prereq_flags = []
    prereq_flags = TO_LIST(prereq_flags)
    if testflags:
        # test each flag on its own so one unsupported flag does not
        # cause the whole set to be rejected
        ok_flags=[]
        for f in flags.split():
            if CHECK_CFLAGS(conf, [f] + prereq_flags):
                ok_flags.append(f)
        flags = ok_flags
    if not name in conf.env:
        conf.env[name] = []
    conf.env[name].extend(TO_LIST(flags))
+
@conf
def ADD_CFLAGS(conf, flags, testflags=False, prereq_flags=None):
    '''Add CFLAGS to the global EXTRA_CFLAGS list.

    Convenience wrapper around ADD_NAMED_CFLAGS; with testflags=True only
    compiler-accepted flags are kept.
    '''
    if prereq_flags is None:
        prereq_flags = []
    ADD_NAMED_CFLAGS(conf, 'EXTRA_CFLAGS', flags,
                     testflags=testflags,
                     prereq_flags=prereq_flags)
+
@conf
def ADD_LDFLAGS(conf, flags, testflags=False):
    '''Add LDFLAGS to the global EXTRA_LDFLAGS list.

    With testflags=True each flag is link-tested individually and only the
    working ones are kept. Returns the flags that were added, if any.
    '''
    if testflags:
        # probe each flag on its own; drop the unsupported ones
        flags = [flag for flag in flags.split() if CHECK_LDFLAGS(conf, flag)]
    if 'EXTRA_LDFLAGS' not in conf.env:
        conf.env['EXTRA_LDFLAGS'] = []
    conf.env['EXTRA_LDFLAGS'].extend(TO_LIST(flags))
    return flags
+
+
@conf
def ADD_EXTRA_INCLUDES(conf, includes):
    '''Register extra include directories applied to every build.'''
    if 'EXTRA_INCLUDES' not in conf.env:
        conf.env['EXTRA_INCLUDES'] = []
    conf.env['EXTRA_INCLUDES'].extend(TO_LIST(includes))
+
+
+
def CURRENT_CFLAGS(bld, target, cflags,
                   allow_warnings=False,
                   use_hostcc=False,
                   hide_symbols=False):
    '''work out the current cflags for a target. local flags are added first

    :param target: target name (kept for interface compatibility; unused here)
    :param allow_warnings: when False, the PICKY_CFLAGS (warnings-as-errors
        set) are appended as well
    :param hide_symbols: append the visibility flags when the compiler
        supports the visibility attribute
    '''
    ret = []
    if use_hostcc:
        ret.append('-D_SAMBA_HOSTCC_')
    ret.extend(TO_LIST(cflags))
    # fixed: previously a local named 'list' shadowed the builtin
    if 'EXTRA_CFLAGS' in bld.env:
        ret.extend(bld.env['EXTRA_CFLAGS'])
    if not allow_warnings and 'PICKY_CFLAGS' in bld.env:
        ret.extend(bld.env['PICKY_CFLAGS'])
    if hide_symbols and bld.env.HAVE_VISIBILITY_ATTR:
        ret.append(bld.env.VISIBILITY_CFLAGS)
    return ret
+
+
@conf
def CHECK_CC_ENV(conf):
    """Normalize the CC environment variable into a list.

    The build farm sometimes puts a space at the start of $CC; TO_LIST
    splits on whitespace and so trims it.
    """
    cc = os.environ.get('CC')
    if cc:
        conf.env.CC = TO_LIST(cc)
+
+
@conf
def SETUP_CONFIGURE_CACHE(conf, enable):
    '''enable/disable cache of configure results

    Controls both the waf cache (Options.cache_global / $WAFCACHE) and the
    preprocessor scanning behaviour used during configure.
    '''
    if enable:
        # when -C is chosen, we will use a private cache and will
        # not look into system includes. This roughly matches what
        # autoconf does with -C
        cache_path = os.path.join(conf.bldnode.abspath(), '.confcache')
        mkdir_p(cache_path)
        Options.cache_global = os.environ['WAFCACHE'] = cache_path
    else:
        # when -C is not chosen we will not cache configure checks
        # We set the recursion limit low to prevent waf from spending
        # a lot of time on the signatures of the files.
        Options.cache_global = os.environ['WAFCACHE'] = ''
        preproc.recursion_limit = 1
    # in either case we don't need to scan system includes
    preproc.go_absolute = False
+
+
@conf
def SAMBA_CHECK_UNDEFINED_SYMBOL_FLAGS(conf):
    '''decide how strictly undefined symbols are treated at link time

    Adds -Wl,-no-undefined where supported, and records the flags needed to
    deliberately allow undefined symbols (used e.g. for python modules).
    '''
    if (Options.options.address_sanitizer
        or Options.options.memory_sanitizer
        or Options.options.enable_libfuzzer):
        # Sanitizers can rely on symbols undefined at library link time and the
        # symbols used for fuzzers are only defined by compiler wrappers.
        return

    if not sys.platform.startswith("openbsd"):
        # we don't want any libraries or modules to rely on runtime
        # resolution of symbols
        conf.env.undefined_ldflags = conf.ADD_LDFLAGS('-Wl,-no-undefined', testflags=True)

    # Darwin-style linkers spell "ignore undefined" differently
    if (conf.env.undefined_ignore_ldflags == [] and
        conf.CHECK_LDFLAGS(['-undefined', 'dynamic_lookup'])):
        conf.env.undefined_ignore_ldflags = ['-undefined', 'dynamic_lookup']
diff --git a/buildtools/wafsamba/samba_autoproto.py b/buildtools/wafsamba/samba_autoproto.py
new file mode 100644
index 0000000..87fb8ef
--- /dev/null
+++ b/buildtools/wafsamba/samba_autoproto.py
@@ -0,0 +1,24 @@
+# waf build tool for building automatic prototypes from C source
+
+import os
+from waflib import Build
+from samba_utils import SET_TARGET_TYPE
+
def SAMBA_AUTOPROTO(bld, header, source):
    '''rule for samba prototype generation

    Creates a task that runs mkproto.pl over *source* to generate the
    private prototypes header *header* before any C compilation starts.
    '''
    bld.SET_BUILD_GROUP('prototypes')
    # register the generated header under its build-relative name
    relpath = os.path.relpath(bld.path.abspath(), bld.srcnode.abspath())
    name = os.path.join(relpath, header)
    SET_TARGET_TYPE(bld, name, 'PROTOTYPE')
    t = bld(
        name = name,
        source = source,
        target = header,
        update_outputs=True,
        ext_out='.c',
        before ='c',
        rule = '${PERL} "${SCRIPT}/mkproto.pl" --srcdir=.. --builddir=. --public=/dev/null --private="${TGT}" ${SRC}'
        )
    # mkproto.pl lives in the source tree, not the build tree
    t.env.SCRIPT = os.path.join(bld.srcnode.abspath(), 'source4/script')
Build.BuildContext.SAMBA_AUTOPROTO = SAMBA_AUTOPROTO
+
diff --git a/buildtools/wafsamba/samba_bundled.py b/buildtools/wafsamba/samba_bundled.py
new file mode 100644
index 0000000..029be15
--- /dev/null
+++ b/buildtools/wafsamba/samba_bundled.py
@@ -0,0 +1,273 @@
+# functions to support bundled libraries
+
+import sys
+from waflib import Build, Options, Logs
+from waflib.Configure import conf
+from wafsamba import samba_utils
+
def PRIVATE_NAME(bld, name):
    '''possibly rename a library to include a bundled extension

    Appends the configured private extension ("name-extension") unless the
    library is explicitly excluded or already carries the extension.
    '''

    extension = bld.env.PRIVATE_EXTENSION

    # libraries listed as exceptions keep their plain name
    if name in bld.env.PRIVATE_EXTENSION_EXCEPTION:
        return name

    # no private extension configured: nothing to append
    # (previously this produced a malformed name with a trailing '-')
    if not extension:
        return name

    # already starts or ends with the extension
    if name.startswith(extension) or name.endswith(extension):
        return name

    return "%s-%s" % (name, extension)
+
+
def target_in_list(target, lst, default):
    '''Decide whether *target* is selected by the selector list *lst*.

    Entries are evaluated in order: an exact match selects, a '!target'
    entry deselects, 'ALL' selects everything and 'NONE' deselects
    everything. If no entry decides, *default* is returned.
    '''
    negated = '!' + target
    for entry in lst:
        if entry == target:
            return True
        if entry == negated:
            return False
        if entry == 'ALL':
            return True
        if entry == 'NONE':
            return False
    return default
+
+
def BUILTIN_LIBRARY(bld, name):
    '''Return True when *name* should be linked into its users directly
    instead of being built as a shared library.'''
    builtins = bld.env.BUILTIN_LIBRARIES
    return target_in_list(name, builtins, False)
Build.BuildContext.BUILTIN_LIBRARY = BUILTIN_LIBRARY
+
+
def BUILTIN_DEFAULT(opt, builtins):
    '''set a comma separated default list of builtin libraries for this package

    Writes the default straight into the parsed-options namespace so a later
    wscript cannot override a value set by an earlier one.
    '''
    if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options.__dict__:
        return
    Options.options.__dict__['BUILTIN_LIBRARIES_DEFAULT'] = builtins
Options.OptionsContext.BUILTIN_DEFAULT = BUILTIN_DEFAULT
+
+
def PRIVATE_EXTENSION_DEFAULT(opt, extension, noextension=''):
    '''set a default private library extension

    :param extension: suffix appended to private library names (see PRIVATE_NAME)
    :param noextension: libraries that never get the suffix
    First caller wins: later calls do not override an already-set default.
    '''
    if 'PRIVATE_EXTENSION_DEFAULT' in Options.options.__dict__:
        return
    Options.options.__dict__['PRIVATE_EXTENSION_DEFAULT'] = extension
    Options.options.__dict__['PRIVATE_EXTENSION_EXCEPTION'] = noextension
Options.OptionsContext.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT
+
+
def minimum_library_version(conf, libname, default):
    '''Allow override of the minimum system library version.

    --minimum-library-version takes a comma separated list of
    "libname:version" pairs; the matching entry, if any, replaces *default*.
    '''

    minlist = Options.options.MINIMUM_LIBRARY_VERSION
    if not minlist:
        return default

    for entry in minlist.split(','):
        try:
            lib, version = entry.split(':')
        except ValueError:
            Logs.error("Bad syntax for --minimum-library-version of %s" % entry)
            sys.exit(1)
        if lib == libname:
            return version
    return default
+
+
@conf
def LIB_MAY_BE_BUNDLED(conf, libname):
    '''Return True unless configuration forbids bundling *libname*.'''
    bundled = conf.env.BUNDLED_LIBS
    if libname in conf.env.SYSTEM_LIBS:
        return False
    if libname in bundled:
        return True
    # an explicit '!lib' entry or a global 'NONE' forbids bundling
    if ('!%s' % libname) in bundled or 'NONE' in bundled:
        return False
    return True
+
def __LIB_MUST_BE(liblist, libname):
    '''Shared predicate: does *liblist* force *libname*?

    An exact entry forces it; an explicit '!lib' entry exempts it; 'ALL'
    forces everything else.
    '''
    if libname in liblist:
        return True
    if '!%s' % libname in liblist:
        return False
    return 'ALL' in liblist
+
@conf
def LIB_MUST_BE_BUNDLED(conf, libname):
    '''True when BUNDLED_LIBS forces *libname* to be built in-tree.'''
    liblist = conf.env.BUNDLED_LIBS
    return __LIB_MUST_BE(liblist, libname)
+
@conf
def LIB_MUST_BE_PRIVATE(conf, libname):
    '''True when PRIVATE_LIBS forces *libname* to be a private library.'''
    liblist = conf.env.PRIVATE_LIBS
    return __LIB_MUST_BE(liblist, libname)
+
@conf
def CHECK_BUNDLED_SYSTEM_PKG(conf, libname, minversion='0.0.0',
                             maxversion=None, version_blacklist=None,
                             onlyif=None, implied_deps=None, pkg=None):
    '''Check for a system library using pkg-config only.

    Thin wrapper over CHECK_BUNDLED_SYSTEM that omits the function/header
    fallback checks, so pkg-config is the only detection mechanism.
    '''
    if version_blacklist is None:
        version_blacklist = []
    return conf.CHECK_BUNDLED_SYSTEM(libname,
                                     minversion=minversion,
                                     maxversion=maxversion,
                                     version_blacklist=version_blacklist,
                                     onlyif=onlyif,
                                     implied_deps=implied_deps,
                                     pkg=pkg)
+
@conf
def CHECK_BUNDLED_SYSTEM(conf, libname, minversion='0.0.0',
                         maxversion=None, version_blacklist=None,
                         checkfunctions=None, headers=None, checkcode=None,
                         onlyif=None, implied_deps=None,
                         require_headers=True, pkg=None, set_target=True):
    '''check if a library is available as a system library.
    this first tries via pkg-config, then if that fails
    tries by testing for a specified function in the specified lib

    Results are cached in conf.env['FOUND_SYSTEMLIB_<libname>'].
    '''
    # We always do a logic validation of 'onlyif' first
    if version_blacklist is None:
        version_blacklist = []
    missing = []
    if onlyif:
        for l in samba_utils.TO_LIST(onlyif):
            f = 'FOUND_SYSTEMLIB_%s' % l
            if not f in conf.env:
                Logs.error('ERROR: CHECK_BUNDLED_SYSTEM(%s) - ' % (libname) +
                           'missing prerequisite check for ' +
                           'system library %s, onlyif=%r' % (l, onlyif))
                sys.exit(1)
            if not conf.env[f]:
                missing.append(l)
    found = 'FOUND_SYSTEMLIB_%s' % libname
    if found in conf.env:
        # already checked in a previous call: reuse the cached result
        return conf.env[found]
    if conf.LIB_MUST_BE_BUNDLED(libname):
        conf.env[found] = False
        return False

    # see if the library should only use a system version if another dependent
    # system version is found. That prevents possible use of mixed library
    # versions
    if missing:
        if not conf.LIB_MAY_BE_BUNDLED(libname):
            Logs.error('ERROR: Use of system library %s depends on missing system library/libraries %r' % (libname, missing))
            sys.exit(1)
        conf.env[found] = False
        return False

    def check_functions_headers_code():
        '''helper function for CHECK_BUNDLED_SYSTEM

        NOTE: closes over 'msg', which is assigned below but before this
        helper is ever called.
        '''
        if require_headers and headers and not conf.CHECK_HEADERS(headers, lib=libname):
            return False
        if checkfunctions is not None:
            ok = conf.CHECK_FUNCS_IN(checkfunctions, libname, headers=headers,
                                     empty_decl=False, set_target=False)
            if not ok:
                return False
        if checkcode is not None:
            define='CHECK_BUNDLED_SYSTEM_%s' % libname.upper()
            ok = conf.CHECK_CODE(checkcode, lib=libname,
                                 headers=headers, local_include=False,
                                 msg=msg, define=define)
            conf.CONFIG_RESET(define)
            if not ok:
                return False
        return True

    minversion = minimum_library_version(conf, libname, minversion)

    msg = 'Checking for system %s' % libname
    msg_ver = []
    if minversion != '0.0.0':
        msg_ver.append('>=%s' % minversion)
    if maxversion is not None:
        msg_ver.append('<=%s' % maxversion)
    for v in version_blacklist:
        msg_ver.append('!=%s' % v)
    if msg_ver != []:
        msg += " (%s)" % (" ".join(msg_ver))

    uselib_store=libname.upper()
    if pkg is None:
        pkg = libname

    # build the pkg-config version constraint expression
    version_checks = '%s >= %s' % (pkg, minversion)
    if maxversion is not None:
        version_checks += ' %s <= %s' % (pkg, maxversion)

    version_checks += "".join(' %s != %s' % (pkg, v) for v in version_blacklist)

    # try pkgconfig first
    if (conf.CHECK_CFG(package=pkg,
                       args='"%s" --cflags --libs' % (version_checks),
                       msg=msg, uselib_store=uselib_store) and
        check_functions_headers_code()):
        if set_target:
            conf.SET_TARGET_TYPE(libname, 'SYSLIB')
        conf.env[found] = True
        if implied_deps:
            conf.SET_SYSLIB_DEPS(libname, implied_deps)
        return True
    # fall back to direct function/header/code checks
    if checkfunctions is not None:
        if check_functions_headers_code():
            conf.env[found] = True
            if implied_deps:
                conf.SET_SYSLIB_DEPS(libname, implied_deps)
            if set_target:
                conf.SET_TARGET_TYPE(libname, 'SYSLIB')
            return True
    conf.env[found] = False
    if not conf.LIB_MAY_BE_BUNDLED(libname):
        Logs.error('ERROR: System library %s of version %s not found, and bundling disabled' % (libname, minversion))
        sys.exit(1)
    return False
+
+
def tuplize_version(version):
    '''Convert a dotted version string into a tuple of ints for comparison.'''
    return tuple(int(part) for part in version.split("."))
+
@conf
def CHECK_BUNDLED_SYSTEM_PYTHON(conf, libname, modulename, minversion='0.0.0'):
    '''check if a python module is available on the system and
    has the specified minimum version.

    Returns False when the module is forced to be bundled, is missing, has
    no __version__, or is older than the (possibly overridden) minimum.
    Exits with an error when bundling is also disallowed.
    '''
    if conf.LIB_MUST_BE_BUNDLED(libname):
        return False

    # see if the library should only use a system version if another dependent
    # system version is found. That prevents possible use of mixed library
    # versions
    minversion = minimum_library_version(conf, libname, minversion)

    try:
        m = __import__(modulename)
    except ImportError:
        found = False
    else:
        try:
            version = m.__version__
        except AttributeError:
            # module has no __version__: cannot verify the minimum
            found = False
        else:
            found = tuplize_version(version) >= tuplize_version(minversion)
    if not found and not conf.LIB_MAY_BE_BUNDLED(libname):
        Logs.error('ERROR: Python module %s of version %s not found, and bundling disabled' % (libname, minversion))
        sys.exit(1)
    return found
+
+
def NONSHARED_BINARY(bld, name):
    '''Return True if a binary should be built without non-system shared libs.'''
    nonshared = bld.env.NONSHARED_BINARIES
    return target_in_list(name, nonshared, False)
Build.BuildContext.NONSHARED_BINARY = NONSHARED_BINARY
+
+
diff --git a/buildtools/wafsamba/samba_conftests.py b/buildtools/wafsamba/samba_conftests.py
new file mode 100644
index 0000000..38ce20d
--- /dev/null
+++ b/buildtools/wafsamba/samba_conftests.py
@@ -0,0 +1,529 @@
+# a set of config tests that use the samba_autoconf functions
+# to test for commonly needed configuration options
+
+import os, shutil, re
+from waflib import Build, Configure, Utils, Options, Logs, Errors
+from waflib.Configure import conf
+from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH, get_string
+
+
def add_option(self, *k, **kw):
    '''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests'''
    # remember this OptionsContext class-wide so the overridden check()
    # below can walk the registered options later
    Options.OptionsContext.parser = self
    match = kw.get('match', [])
    if match:
        # 'match' is our extension, strip it before calling optparse
        del kw['match']
    opt = self.parser.add_option(*k, **kw)
    # list of configure-check messages this option applies to
    opt.match = match
    return opt
Options.OptionsContext.add_option = add_option
+
@conf
def check(self, *k, **kw):
    '''Override the waf defaults to inject --with-directory options

    Any option registered with a 'match' attribute whose list contains this
    check's msg contributes its directory to CPPPATH/LIBPATH for the test,
    and permanently if the test succeeds.
    '''

    if not 'env' in kw:
        kw['env'] = self.env.derive()

    # match the configuration test with specific options, for example:
    # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"
    additional_dirs = []
    if 'msg' in kw:
        msg = kw['msg']
        for x in Options.OptionsContext.parser.parser.option_list:
            if getattr(x, 'match', None) and msg in x.match:
                d = getattr(Options.options, x.dest, '')
                if d:
                    additional_dirs.append(d)

    # we add the additional dirs twice: once for the test data, and again if the compilation test succeeds below
    def add_options_dir(dirs, env):
        for x in dirs:
             if not x in env.CPPPATH:
                 env.CPPPATH = [os.path.join(x, 'include')] + env.CPPPATH
             if not x in env.LIBPATH:
                 env.LIBPATH = [os.path.join(x, 'lib')] + env.LIBPATH

    add_options_dir(additional_dirs, kw['env'])

    self.validate_c(kw)
    self.start_msg(kw['msg'])
    ret = None
    try:
        ret = self.run_c_code(*k, **kw)
    except Configure.ConfigurationError as e:
        self.end_msg(kw['errmsg'], 'YELLOW')
        if 'mandatory' in kw and kw['mandatory']:
            if Logs.verbose > 1:
                raise
            else:
                self.fatal('the configuration failed (see %r)' % self.log.name)
    else:
        kw['success'] = ret
        self.end_msg(self.ret_msg(kw['okmsg'], kw))

        # success! keep the CPPPATH/LIBPATH
        add_options_dir(additional_dirs, self.env)

    self.post_check(*k, **kw)
    if not kw.get('execute', False):
        return ret == 0
    return ret
+
+
@conf
def CHECK_ICONV(conf, define='HAVE_NATIVE_ICONV'):
    '''Check whether iconv_open() is usable (in libc or libiconv).

    Sets *define* when found.
    '''
    have_iconv = conf.CHECK_FUNCS_IN('iconv_open', 'iconv',
                                     checklibc=True, headers='iconv.h')
    if not have_iconv:
        return False
    conf.DEFINE(define, 1)
    return True
+
+
@conf
def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'):
    '''see what we need for largefile support

    Tries, in order: the flags reported by `getconf LFS_CFLAGS`, no extra
    flags, -D_FILE_OFFSET_BITS=64, then -D_LARGE_FILES. Each probe runs a
    program checking that off_t is at least 64 bits.
    '''
    getconf_cflags = conf.CHECK_COMMAND(['getconf', 'LFS_CFLAGS'])
    if getconf_cflags is not False:
        if (conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
                            define='WORKING_GETCONF_LFS_CFLAGS',
                            execute=True,
                            cflags=getconf_cflags,
                            msg='Checking getconf large file support flags work')):
            conf.ADD_CFLAGS(getconf_cflags)
            getconf_cflags_list=TO_LIST(getconf_cflags)
            # mirror any -D flags from getconf into config.h defines
            for flag in getconf_cflags_list:
                if flag[:2] == "-D":
                    flag_split = flag[2:].split('=')
                    if len(flag_split) == 1:
                        conf.DEFINE(flag_split[0], '1')
                    else:
                        conf.DEFINE(flag_split[0], flag_split[1])

    if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
                       define,
                       execute=True,
                       msg='Checking for large file support without additional flags'):
        return True

    if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
                       define,
                       execute=True,
                       cflags='-D_FILE_OFFSET_BITS=64',
                       msg='Checking for -D_FILE_OFFSET_BITS=64'):
        conf.DEFINE('_FILE_OFFSET_BITS', 64)
        return True

    if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
                       define,
                       execute=True,
                       cflags='-D_LARGE_FILES',
                       msg='Checking for -D_LARGE_FILES'):
        conf.DEFINE('_LARGE_FILES', 1)
        return True
    return False
+
+
@conf
def CHECK_C_PROTOTYPE(conf, function, prototype, define, headers=None, msg=None, lib=None):
    '''Verify that a C prototype matches the one on the current system.

    Declares *prototype* locally and takes the address of *function*; the
    compile relies on the compiler rejecting conflicting redeclarations.
    '''
    # the function must at least be declared before comparing prototypes
    if not conf.CHECK_DECLS(function, headers=headers):
        return False
    if not msg:
        msg = 'Checking C prototype for %s' % function
    code = '%s; void *_x = (void *)%s' % (prototype, function)
    return conf.CHECK_CODE(code,
                           define=define,
                           local_include=False,
                           headers=headers,
                           link=False,
                           execute=False,
                           msg=msg,
                           lib=lib)
+
+
@conf
def CHECK_CHARSET_EXISTS(conf, charset, outcharset='UCS-2LE', headers=None, define=None):
    '''check that a named charset is able to be used with iconv_open() for conversion
    to a target charset

    Runs a small program that opens an iconv descriptor from *charset* to
    *outcharset*; defines HAVE_CHARSET_<NAME> (or *define*) on success.
    '''
    msg = 'Checking if can we convert from %s to %s' % (charset, outcharset)
    if define is None:
        define = 'HAVE_CHARSET_%s' % charset.upper().replace('-','_')
    return conf.CHECK_CODE('''
                           iconv_t cd = iconv_open("%s", "%s");
                           if (cd == 0 || cd == (iconv_t)-1) return -1;
                           ''' % (charset, outcharset),
                           define=define,
                           execute=True,
                           msg=msg,
                           lib='iconv',
                           headers=headers)
+
def find_config_dir(conf):
    '''find a fresh directory to run configuration tests in

    Probes .conf_check_0, .conf_check_1, ... under the build node, removing
    any stale directory it can, and creates the first slot that is free.
    Calls conf.fatal() if no usable directory can be created.
    '''
    k = 0
    while k < 10000:
        test_dir = os.path.join(conf.bldnode.abspath(), '.conf_check_%d' % k)
        # remove a stale directory from a previous run, if possible
        try:
            shutil.rmtree(test_dir)
        except OSError:
            pass
        # the slot is free once stat() fails
        # (narrowed from a bare 'except:' which also swallowed
        # KeyboardInterrupt/SystemExit)
        try:
            os.stat(test_dir)
        except OSError:
            break
        k += 1

    try:
        os.makedirs(test_dir)
    except OSError:
        conf.fatal('cannot create a configuration test folder %r' % test_dir)

    try:
        os.stat(test_dir)
    except OSError:
        conf.fatal('cannot use the configuration test folder %r' % test_dir)
    return test_dir
+
@conf
def CHECK_SHLIB_INTRASINC_NAME_FLAGS(conf, msg):
    '''
    check if the waf default flags for setting the name of lib
    are ok

    Builds a trivial versioned shared library (vnum="1") and reports
    whether the link succeeds.
    '''

    snip = '''
int foo(int v) {
    return v * 2;
}
'''
    return conf.check(features='c cshlib',vnum="1",fragment=snip,msg=msg, mandatory=False)
+
@conf
def CHECK_NEED_LC(conf, msg):
    '''check if we need -lc

    Builds a tiny shared library that uses stdio in a private build
    context; success means linking without an explicit -lc works.
    NOTE(review): uses waf-1.x-era BuildContext APIs (lst_variants,
    load_dirs, rescan) — verify against the bundled waf version.
    '''

    dir = find_config_dir(conf)

    env = conf.env

    bdir = os.path.join(dir, 'testbuild2')
    if not os.path.exists(bdir):
        os.makedirs(bdir)


    subdir = os.path.join(dir, "liblctest")

    os.makedirs(subdir)

    Utils.writef(os.path.join(subdir, 'liblc1.c'), '#include <stdio.h>\nint lib_func(void) { FILE *f = fopen("foo", "r");}\n')

    # set up a minimal private build context for the test compile
    bld = Build.BuildContext()
    bld.log = conf.log
    bld.all_envs.update(conf.all_envs)
    bld.all_envs['default'] = env
    bld.lst_variants = bld.all_envs.keys()
    bld.load_dirs(dir, bdir)

    bld.rescan(bld.srcnode)

    bld(features='c cshlib',
        source='liblctest/liblc1.c',
        ldflags=conf.env['EXTRA_LDFLAGS'],
        target='liblc',
        name='liblc')

    try:
        bld.compile()
        conf.check_message(msg, '', True)
        return True
    except:
        conf.check_message(msg, '', False)
        return False
+
+
@conf
def CHECK_SHLIB_W_PYTHON(conf, msg):
    '''check if we need -undefined dynamic_lookup

    Tries to build a shared library that references Python symbols and
    macOS _NSGetEnviron; failure indicates the Darwin-style flag is needed.
    '''

    dir = find_config_dir(conf)
    snip = '''
#include <Python.h>
#include <crt_externs.h>
#define environ (*_NSGetEnviron())

static PyObject *ldb_module = NULL;
int foo(int v) {
    extern char **environ;
    environ[0] = 1;
    ldb_module = PyImport_ImportModule("ldb");
    return v * 2;
}
'''
    return conf.check(features='c cshlib',uselib='PYEMBED',fragment=snip,msg=msg, mandatory=False)
+
+# this one is quite complex, and should probably be broken up
+# into several parts. I'd quite like to create a set of CHECK_COMPOUND()
+# functions that make writing complex compound tests like this much easier
@conf
def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None):
    '''see if the platform supports building libraries

    Builds a shared library plus a program that links against it in a
    private build context, then runs the program (optionally via rpath or
    LD_LIBRARY_PATH) and reports whether it exits successfully.
    NOTE(review): relies on waf-1.x-era BuildContext APIs — verify against
    the bundled waf version.
    '''

    if msg is None:
        if rpath:
            msg = "rpath library support"
        else:
            msg = "building library support"

    dir = find_config_dir(conf)

    bdir = os.path.join(dir, 'testbuild')
    if not os.path.exists(bdir):
        os.makedirs(bdir)

    env = conf.env

    subdir = os.path.join(dir, "libdir")

    os.makedirs(subdir)

    # library returns 42; the program exits 0 only if it sees 42
    Utils.writef(os.path.join(subdir, 'lib1.c'), 'int lib_func(void) { return 42; }\n')
    Utils.writef(os.path.join(dir, 'main.c'),
                 'int lib_func(void);\n'
                 'int main(void) {return !(lib_func() == 42);}\n')

    bld = Build.BuildContext()
    bld.log = conf.log
    bld.all_envs.update(conf.all_envs)
    bld.all_envs['default'] = env
    bld.lst_variants = bld.all_envs.keys()
    bld.load_dirs(dir, bdir)

    bld.rescan(bld.srcnode)

    ldflags = []
    if version_script:
        ldflags.append("-Wl,--version-script=%s/vscript" % bld.path.abspath())
        Utils.writef(os.path.join(dir,'vscript'), 'TEST_1.0A2 { global: *; };\n')

    bld(features='c cshlib',
        source='libdir/lib1.c',
        target='libdir/lib1',
        ldflags=ldflags,
        name='lib1')

    o = bld(features='c cprogram',
            source='main.c',
            target='prog1',
            uselib_local='lib1')

    if rpath:
        o.rpath=os.path.join(bdir, 'default/libdir')

    # compile the program
    try:
        bld.compile()
    except:
        conf.check_message(msg, '', False)
        return False

    # path for execution
    lastprog = o.link_task.outputs[0].abspath(env)

    if not rpath:
        # no rpath: make the library findable via LD_LIBRARY_PATH,
        # remembering the old value so it can be restored below
        if 'LD_LIBRARY_PATH' in os.environ:
            old_ld_library_path = os.environ['LD_LIBRARY_PATH']
        else:
            old_ld_library_path = None
        ADD_LD_LIBRARY_PATH(os.path.join(bdir, 'default/libdir'))

    # we need to run the program, try to get its result
    args = conf.SAMBA_CROSS_ARGS(msg=msg)
    proc = Utils.subprocess.Popen([lastprog] + args,
                                  stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE)
    (out, err) = proc.communicate()
    w = conf.log.write
    w(str(out))
    w('\n')
    w(str(err))
    w('\nreturncode %r\n' % proc.returncode)
    ret = (proc.returncode == 0)

    if not rpath:
        os.environ['LD_LIBRARY_PATH'] = old_ld_library_path or ''

    conf.check_message(msg, '', ret)
    return ret
+
+
+
@conf
def CHECK_PERL_MANPAGE(conf, msg=None, section=None):
    '''work out what extension perl uses for manpages

    Runs ExtUtils::MakeMaker on a throwaway Makefile.PL; when *section* is
    given, returns the MAN<section>EXT value from the generated Makefile,
    otherwise returns True when manpage generation works at all.
    '''

    if msg is None:
        if section:
            msg = "perl man%s extension" % section
        else:
            msg = "perl manpage generation"

    conf.start_msg(msg)

    dir = find_config_dir(conf)

    bdir = os.path.join(dir, 'testbuild')
    if not os.path.exists(bdir):
        os.makedirs(bdir)

    Utils.writef(os.path.join(bdir, 'Makefile.PL'), """
use ExtUtils::MakeMaker;
WriteMakefile(
    'NAME'    => 'WafTest',
    'EXE_FILES' => [ 'WafTest' ]
);
""")
    back = os.path.abspath('.')
    os.chdir(bdir)
    try:
        proc = Utils.subprocess.Popen(['perl', 'Makefile.PL'],
                                      stdout=Utils.subprocess.PIPE,
                                      stderr=Utils.subprocess.PIPE)
        (out, err) = proc.communicate()
    finally:
        # always restore the working directory, even if Popen raises
        # (previously an exception left the process chdir'd into bdir)
        os.chdir(back)

    ret = (proc.returncode == 0)
    if not ret:
        conf.end_msg('not found', color='YELLOW')
        return

    if section:
        # extract the manpage extension from the generated Makefile
        man = Utils.readf(os.path.join(bdir,'Makefile'))
        m = re.search(r'MAN%sEXT\s+=\s+(\w+)' % section, man)
        if not m:
            conf.end_msg('not found', color='YELLOW')
            return
        ext = m.group(1)
        conf.end_msg(ext)
        return ext

    conf.end_msg('ok')
    return True
+
+
@conf
def CHECK_COMMAND(conf, cmd, msg=None, define=None, on_target=True, boolean=False):
    '''run a command and return its output (or True/False when boolean)

    :param on_target: append the cross-execution arguments so the command
        runs on the configured target
    :param boolean: only record success/failure instead of the output
    :param define: optionally DEFINE() the result
    Returns False if the command could not be run.
    '''
    if msg is None:
        msg = 'Checking %s' % ' '.join(cmd)
    conf.COMPOUND_START(msg)
    cmd = cmd[:]
    if on_target:
        cmd.extend(conf.SAMBA_CROSS_ARGS(msg=msg))
    try:
        ret = get_string(Utils.cmd_output(cmd))
    except Exception:
        # narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; any execution failure means "not available"
        conf.COMPOUND_END(False)
        return False
    if boolean:
        conf.COMPOUND_END('ok')
        if define:
            conf.DEFINE(define, '1')
    else:
        ret = ret.strip()
        conf.COMPOUND_END(ret)
        if define:
            conf.DEFINE(define, ret, quote=True)
    return ret
+
+
@conf
def CHECK_UNAME(conf):
    '''setup SYSTEM_UNAME_* defines

    Runs a small program per utsname field and stores its printed value;
    returns False if any field check fails.
    '''
    ret = True
    for v in "sysname machine release version".split():
        # %% is a literal % for the C printf; %s is substituted with the field
        if not conf.CHECK_CODE('''
                               int printf(const char *format, ...);
                               struct utsname n;
                               if (uname(&n) == -1) return -1;
                               printf("%%s", n.%s);
                               ''' % v,
                               define='SYSTEM_UNAME_%s' % v.upper(),
                               execute=True,
                               define_ret=True,
                               quote=True,
                               headers='sys/utsname.h',
                               local_include=False,
                               msg="Checking uname %s type" % v):
            ret = False
    return ret
+
@conf
def CHECK_INLINE(conf):
    '''check for the right value for inline

    Tries 'inline', '__inline__' and '__inline' in turn; if a non-standard
    spelling works, defines 'inline' to it so code can always use 'inline'.
    '''
    conf.COMPOUND_START('Checking for inline')
    for i in ['inline', '__inline__', '__inline']:
        ret = conf.CHECK_CODE('''
        typedef int foo_t;
        static %s foo_t static_foo () {return 0; }
        %s foo_t foo () {return 0; }\n''' % (i, i),
                              define='INLINE_MACRO',
                              addmain=False,
                              link=False)
        if ret:
            if i != 'inline':
                conf.DEFINE('inline', i, quote=False)
            break
    # 'ret'/'i' hold the result of the last (or first successful) attempt
    if not ret:
        conf.COMPOUND_END(ret)
    else:
        conf.COMPOUND_END(i)
    return ret
+
@conf
def CHECK_XSLTPROC_MANPAGES(conf):
    '''check if xsltproc can run with the given stylesheets

    Finds xsltproc and probes (without network access) whether the docbook
    manpages stylesheet is available locally; sets XSLTPROC_MANPAGES.
    '''


    if not conf.CONFIG_SET('XSLTPROC'):
        conf.find_program('xsltproc', var='XSLTPROC')
    if not conf.CONFIG_SET('XSLTPROC'):
        return False

    # --nonet forces use of a locally installed copy of the stylesheet
    s='http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl'
    conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.get_flat('XSLTPROC'), s),
                       msg='Checking for stylesheet %s' % s,
                       define='XSLTPROC_MANPAGES', on_target=False,
                       boolean=True)
    if not conf.CONFIG_SET('XSLTPROC_MANPAGES'):
        print("A local copy of the docbook.xsl wasn't found on your system" \
              " consider installing package like docbook-xsl")
+
+#
+# Determine the standard libpath for the used compiler,
+# so we can later use that to filter out these standard
+# library paths when some tools like cups-config or
+# python-config report standard lib paths with their
+# ldflags (-L...)
+#
@conf
def CHECK_STANDARD_LIBPATH(conf):
    '''record the compiler's standard library search path

    Stores the directories in conf.env.STANDARD_LIBPATH so tools like
    cups-config or python-config reporting standard -L paths can be
    filtered later.
    '''
    # at least gcc and clang support this:
    try:
        cmd = conf.env.CC + ['-print-search-dirs']
        out = get_string(Utils.cmd_output(cmd)).split('\n')
    except ValueError:
        # option not supported by compiler - use a standard list of directories
        dirlist = [ '/usr/lib', '/usr/lib64' ]
    except:
        # anything else is unexpected; re-raise as a waf error
        # (note: the original exception context is discarded here)
        raise Errors.WafError('Unexpected error running "%s"' % (cmd))
    else:
        dirlist = []
        for line in out:
            line = line.strip()
            if line.startswith("libraries: ="):
                dirliststr = line[len("libraries: ="):]
                dirlist = [ os.path.normpath(x) for x in dirliststr.split(':') ]
                break

    conf.env.STANDARD_LIBPATH = dirlist
+
diff --git a/buildtools/wafsamba/samba_cross.py b/buildtools/wafsamba/samba_cross.py
new file mode 100644
index 0000000..7ec1edc
--- /dev/null
+++ b/buildtools/wafsamba/samba_cross.py
@@ -0,0 +1,175 @@
+# functions for handling cross-compilation
+
+import os, sys, re, shlex
+from waflib import Utils, Logs, Options, Errors, Context
+from waflib.Configure import conf
+from wafsamba import samba_utils
+
+real_Popen = None
+
+ANSWER_UNKNOWN = (254, "")
+ANSWER_NO = (1, "")
+ANSWER_OK = (0, "")
+
+cross_answers_incomplete = False
+
+
+def add_answer(ca_file, msg, answer):
+ '''add an answer to a set of cross answers'''
+ try:
+ f = open(ca_file, 'a')
+ except:
+ Logs.error("Unable to open cross-answers file %s" % ca_file)
+ sys.exit(1)
+ (retcode, retstring) = answer
+ # if retstring is more than one line then we probably
+ # don't care about its actual content (the tests should
+ # yield one-line output in order to comply with the cross-answer
+ # format)
+ retstring = retstring.strip()
+ if len(retstring.split('\n')) > 1:
+ retstring = ''
+ answer = (retcode, retstring)
+
+ if answer == ANSWER_OK:
+ f.write('%s: OK\n' % msg)
+ elif answer == ANSWER_UNKNOWN:
+ f.write('%s: UNKNOWN\n' % msg)
+ elif answer == ANSWER_NO:
+ f.write('%s: NO\n' % msg)
+ else:
+ if retcode == 0:
+ f.write('%s: "%s"\n' % (msg, retstring))
+ else:
+ f.write('%s: (%d, "%s")\n' % (msg, retcode, retstring))
+ f.close()
+
+
+def cross_answer(ca_file, msg):
+    '''return a (retcode,retstring) tuple from an answers file'''
+ try:
+ f = open(ca_file, 'r')
+ except:
+ return ANSWER_UNKNOWN
+ for line in f:
+ line = line.strip()
+ if line == '' or line[0] == '#':
+ continue
+ if line.find(':') != -1:
+ a = line.split(':', 1)
+ thismsg = a[0].strip()
+ if thismsg != msg:
+ continue
+ ans = a[1].strip()
+ if ans == "OK" or ans == "YES":
+ f.close()
+ return ANSWER_OK
+ elif ans == "UNKNOWN":
+ f.close()
+ return ANSWER_UNKNOWN
+ elif ans == "FAIL" or ans == "NO":
+ f.close()
+ return ANSWER_NO
+ elif ans[0] == '"':
+ f.close()
+ return (0, ans.strip('"'))
+ elif ans[0] == "'":
+ f.close()
+ return (0, ans.strip("'"))
+ else:
+ m = re.match(r'\(\s*(-?\d+)\s*,\s*\"(.*)\"\s*\)', ans)
+ if m:
+ f.close()
+ return (int(m.group(1)), m.group(2))
+ else:
+ raise Errors.WafError("Bad answer format '%s' in %s" % (line, ca_file))
+ f.close()
+ return ANSWER_UNKNOWN
+
+
+class cross_Popen(Utils.subprocess.Popen):
+ '''cross-compilation wrapper for Popen'''
+ def __init__(*k, **kw):
+ (obj, args) = k
+ use_answers = False
+ ans = ANSWER_UNKNOWN
+
+ # Three possibilities:
+ # 1. Only cross-answers - try the cross-answers file, and if
+ # there's no corresponding answer, add to the file and mark
+ # the configure process as unfinished.
+ # 2. Only cross-execute - get the answer from cross-execute
+ # 3. Both - try the cross-answers file, and if there is no
+ # corresponding answer - use cross-execute to get an answer,
+ # and add that answer to the file.
+ if '--cross-answers' in args:
+ # when --cross-answers is set, then change the arguments
+ # to use the cross answers if available
+ use_answers = True
+ i = args.index('--cross-answers')
+ ca_file = args[i+1]
+ msg = args[i+2]
+ ans = cross_answer(ca_file, msg)
+
+ if '--cross-execute' in args and ans == ANSWER_UNKNOWN:
+ # when --cross-execute is set, then change the arguments
+ # to use the cross emulator
+ i = args.index('--cross-execute')
+ newargs = shlex.split(args[i+1])
+ newargs.extend(args[0:i])
+ if use_answers:
+ p = real_Popen(newargs,
+ stdout=Utils.subprocess.PIPE,
+ stderr=Utils.subprocess.PIPE,
+ env=kw.get('env', {}))
+ ce_out, ce_err = p.communicate()
+ ans = (p.returncode, samba_utils.get_string(ce_out))
+ add_answer(ca_file, msg, ans)
+ else:
+ args = newargs
+
+ if use_answers:
+ if ans == ANSWER_UNKNOWN:
+ global cross_answers_incomplete
+ cross_answers_incomplete = True
+ add_answer(ca_file, msg, ans)
+ (retcode, retstring) = ans
+ args = ['/bin/sh', '-c', "printf %%s '%s'; exit %d" % (retstring, retcode)]
+ real_Popen.__init__(*(obj, args), **kw)
+
+
+@conf
+def SAMBA_CROSS_ARGS(conf, msg=None):
+ '''get test_args to pass when running cross compiled binaries'''
+ if not conf.env.CROSS_COMPILE:
+ return []
+
+ global real_Popen
+ if real_Popen is None:
+ real_Popen = Utils.subprocess.Popen
+ Utils.subprocess.Popen = cross_Popen
+ Utils.run_process = Utils.run_regular_process
+ Utils.get_process = Utils.alloc_process_pool = Utils.nada
+
+ ret = []
+
+ if conf.env.CROSS_EXECUTE:
+ ret.extend(['--cross-execute', conf.env.CROSS_EXECUTE])
+
+ if conf.env.CROSS_ANSWERS:
+ if msg is None:
+ raise Errors.WafError("Cannot have NULL msg in cross-answers")
+ ret.extend(['--cross-answers', os.path.join(Context.launch_dir, conf.env.CROSS_ANSWERS), msg])
+
+ if ret == []:
+ raise Errors.WafError("Cannot cross-compile without either --cross-execute or --cross-answers")
+
+ return ret
+
+@conf
+def SAMBA_CROSS_CHECK_COMPLETE(conf):
+ '''check if we have some unanswered questions'''
+ global cross_answers_incomplete
+ if conf.env.CROSS_COMPILE and cross_answers_incomplete:
+ raise Errors.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS)
+ return True
diff --git a/buildtools/wafsamba/samba_deps.py b/buildtools/wafsamba/samba_deps.py
new file mode 100644
index 0000000..80379d3
--- /dev/null
+++ b/buildtools/wafsamba/samba_deps.py
@@ -0,0 +1,1314 @@
+# Samba automatic dependency handling and project rules
+
+import os, sys, re
+
+from waflib import Build, Options, Logs, Utils, Errors, Task
+from waflib.Logs import debug
+from waflib.Configure import conf
+from waflib import ConfigSet
+
+from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list
+from samba_autoconf import library_flags
+
+@conf
+def ADD_GLOBAL_DEPENDENCY(ctx, dep):
+ '''add a dependency for all binaries and libraries'''
+ if not 'GLOBAL_DEPENDENCIES' in ctx.env:
+ ctx.env.GLOBAL_DEPENDENCIES = []
+ ctx.env.GLOBAL_DEPENDENCIES.append(dep)
+
+
+@conf
+def BREAK_CIRCULAR_LIBRARY_DEPENDENCIES(ctx):
+ '''indicate that circular dependencies between libraries should be broken.'''
+ ctx.env.ALLOW_CIRCULAR_LIB_DEPENDENCIES = True
+
+
+@conf
+def SET_SYSLIB_DEPS(conf, target, deps):
+ '''setup some implied dependencies for a SYSLIB'''
+ cache = LOCAL_CACHE(conf, 'SYSLIB_DEPS')
+ cache[target] = deps
+
+
+def expand_subsystem_deps(bld):
+ '''expand the reverse dependencies resulting from subsystem
+ attributes of modules. This is walking over the complete list
+ of declared subsystems, and expands the samba_deps_extended list for any
+ module<->subsystem dependencies'''
+
+ subsystem_list = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ for subsystem_name in subsystem_list:
+ bld.ASSERT(subsystem_name in targets, "Subsystem target %s not declared" % subsystem_name)
+ type = targets[subsystem_name]
+ if type == 'DISABLED' or type == 'EMPTY':
+ continue
+
+ # for example,
+ # subsystem_name = dcerpc_server (a subsystem)
+ # subsystem = dcerpc_server (a subsystem object)
+ # module_name = rpc_epmapper (a module within the dcerpc_server subsystem)
+ # module = rpc_epmapper (a module object within the dcerpc_server subsystem)
+
+ subsystem = bld.get_tgen_by_name(subsystem_name)
+ bld.ASSERT(subsystem is not None, "Unable to find subsystem %s" % subsystem_name)
+ for d in subsystem_list[subsystem_name]:
+ module_name = d['TARGET']
+ module_type = targets[module_name]
+ if module_type in ['DISABLED', 'EMPTY']:
+ continue
+ bld.ASSERT(subsystem is not None,
+ "Subsystem target %s for %s (%s) not found" % (subsystem_name, module_name, module_type))
+ if module_type in ['SUBSYSTEM']:
+ # if a module is a plain object type (not a library) then the
+ # subsystem it is part of needs to have it as a dependency, so targets
+ # that depend on this subsystem get the modules of that subsystem
+ subsystem.samba_deps_extended.append(module_name)
+ subsystem.samba_deps_extended = unique_list(subsystem.samba_deps_extended)
+
+
+
+def build_dependencies(self):
+ '''This builds the dependency list for a target. It runs after all the targets are declared
+
+ The reason this is not just done in the SAMBA_*() rules is that we have no way of knowing
+ the full dependency list for a target until we have all of the targets declared.
+ '''
+
+ if self.samba_type in ['LIBRARY', 'PLUGIN', 'BINARY', 'PYTHON']:
+ self.uselib = list(self.final_syslibs)
+ self.uselib_local = list(self.final_libs)
+ self.add_objects = list(self.final_objects)
+
+ # extra link flags from pkg_config
+ libs = self.final_syslibs.copy()
+
+ (cflags, ldflags, cpppath) = library_flags(self, list(libs))
+ new_ldflags = getattr(self, 'samba_ldflags', [])[:]
+ new_ldflags.extend(ldflags)
+ self.ldflags = new_ldflags
+
+ if getattr(self, 'allow_undefined_symbols', False) and self.env.undefined_ldflags:
+ for f in self.env.undefined_ldflags:
+ self.ldflags.remove(f)
+
+ if getattr(self, 'allow_undefined_symbols', False) and self.env.undefined_ignore_ldflags:
+ for f in self.env.undefined_ignore_ldflags:
+ self.ldflags.append(f)
+
+ debug('deps: computed dependencies for target %s: uselib=%s uselib_local=%s add_objects=%s',
+ self.sname, self.uselib, self.uselib_local, self.add_objects)
+
+ if self.samba_type in ['SUBSYSTEM', 'BUILTIN']:
+ # this is needed for the cflags of libs that come from pkg_config
+ self.uselib = list(self.final_syslibs)
+ self.uselib.extend(list(self.direct_syslibs))
+ for lib in self.final_libs:
+ t = self.bld.get_tgen_by_name(lib)
+ self.uselib.extend(list(t.final_syslibs))
+ self.uselib = unique_list(self.uselib)
+
+ if getattr(self, 'uselib', None):
+ up_list = []
+ for l in self.uselib:
+ up_list.append(l.upper())
+ self.uselib = up_list
+
+
+def build_includes(self):
+ '''This builds the right set of includes for a target.
+
+ One tricky part of this is that the includes= attribute for a
+ target needs to use paths which are relative to that targets
+ declaration directory (which we can get at via t.path).
+
+ The way this works is the includes list gets added as
+ samba_includes in the main build task declaration. Then this
+ function runs after all of the tasks are declared, and it
+ processes the samba_includes attribute to produce a includes=
+ attribute
+ '''
+
+ if getattr(self, 'samba_includes', None) is None:
+ return
+
+ bld = self.bld
+
+ inc_deps = includes_objects(bld, self, set(), {})
+
+ includes = []
+
+ # maybe add local includes
+ if getattr(self, 'local_include', True) and getattr(self, 'local_include_first', True):
+ includes.append('.')
+
+ includes.extend(self.samba_includes_extended)
+
+ if 'EXTRA_INCLUDES' in bld.env and getattr(self, 'global_include', True):
+ includes.extend(bld.env['EXTRA_INCLUDES'])
+
+ includes.append('#')
+
+ inc_set = set()
+ inc_abs = []
+
+ for d in inc_deps:
+ t = bld.get_tgen_by_name(d)
+ bld.ASSERT(t is not None, "Unable to find dependency %s for %s" % (d, self.sname))
+ inclist = getattr(t, 'samba_includes_extended', [])[:]
+ if getattr(t, 'local_include', True):
+ inclist.append('.')
+ if inclist == []:
+ continue
+ tpath = t.samba_abspath
+ for inc in inclist:
+ npath = tpath + '/' + inc
+ if not npath in inc_set:
+ inc_abs.append(npath)
+ inc_set.add(npath)
+
+ mypath = self.path.abspath(bld.env)
+ for inc in inc_abs:
+ relpath = os.path.relpath(inc, mypath)
+ includes.append(relpath)
+
+ if getattr(self, 'local_include', True) and not getattr(self, 'local_include_first', True):
+ includes.append('.')
+
+ # now transform the includes list to be relative to the top directory
+ # which is represented by '#' in waf. This allows waf to cache the
+ # includes lists more efficiently
+ includes_top = []
+ for i in includes:
+ if i[0] == '#':
+ # some are already top based
+ includes_top.append(i)
+ continue
+ absinc = os.path.join(self.path.abspath(), i)
+ relinc = os.path.relpath(absinc, self.bld.srcnode.abspath())
+ includes_top.append('#' + relinc)
+
+ self.includes = unique_list(includes_top)
+ debug('deps: includes for target %s: includes=%s',
+ self.sname, self.includes)
+
+
+def add_init_functions(self):
+ '''This builds the right set of init functions'''
+
+ bld = self.bld
+
+ subsystems = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
+
+ # cope with the separated object lists from BINARY and LIBRARY targets
+ sname = self.sname
+ if sname.endswith('.objlist'):
+ sname = sname[0:-8]
+
+ modules = []
+ if sname in subsystems:
+ modules.append(sname)
+
+ m = getattr(self, 'samba_modules', None)
+ if m is not None:
+ modules.extend(TO_LIST(m))
+
+ m = getattr(self, 'samba_subsystem', None)
+ if m is not None:
+ modules.append(m)
+
+ if 'pyembed' in self.features:
+ return
+
+ sentinel = getattr(self, 'init_function_sentinel', 'NULL')
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+ cflags = getattr(self, 'samba_cflags', [])[:]
+
+ if modules == []:
+ sname = sname.replace('-','_')
+ sname = sname.replace('.','_')
+ sname = sname.replace('/','_')
+ cflags.append('-DSTATIC_%s_MODULES=%s' % (sname, sentinel))
+ if sentinel == 'NULL':
+ proto = "extern void __%s_dummy_module_proto(void)" % (sname)
+ cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto))
+ self.cflags = cflags
+ return
+
+ for m in modules:
+ bld.ASSERT(m in subsystems,
+ "No init_function defined for module '%s' in target '%s'" % (m, self.sname))
+ init_fn_list = []
+ for d in subsystems[m]:
+ if targets[d['TARGET']] != 'DISABLED':
+ init_fn_list.append(d['INIT_FUNCTION'])
+ if init_fn_list == []:
+ cflags.append('-DSTATIC_%s_MODULES=%s' % (m, sentinel))
+ if sentinel == 'NULL':
+ proto = "extern void __%s_dummy_module_proto(void)" % (m)
+ cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto))
+ else:
+ cflags.append('-DSTATIC_%s_MODULES=%s' % (m, ','.join(init_fn_list) + ',' + sentinel))
+ proto = "".join('_MODULE_PROTO(%s)' % f for f in init_fn_list) +\
+ "extern void __%s_dummy_module_proto(void)" % (m)
+ cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto))
+ self.cflags = cflags
+
+
+def check_duplicate_sources(bld, tgt_list):
+ '''see if we are compiling the same source file more than once'''
+
+ debug('deps: checking for duplicate sources')
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ for t in tgt_list:
+ source_list = TO_LIST(getattr(t, 'source', ''))
+ tpath = os.path.normpath(os.path.relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default'))
+ obj_sources = set()
+ for s in source_list:
+ if not isinstance(s, str):
+ print('strange path in check_duplicate_sources %r' % s)
+ s = s.abspath()
+ p = os.path.normpath(os.path.join(tpath, s))
+ if p in obj_sources:
+ Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname))
+ sys.exit(1)
+ obj_sources.add(p)
+ t.samba_source_set = obj_sources
+
+ subsystems = {}
+
+ # build a list of targets that each source file is part of
+ for t in tgt_list:
+ if not targets[t.sname] in [ 'LIBRARY', 'PLUGIN', 'BINARY', 'PYTHON' ]:
+ continue
+ for obj in t.add_objects:
+ t2 = t.bld.get_tgen_by_name(obj)
+ source_set = getattr(t2, 'samba_source_set', set())
+ for s in source_set:
+ if not s in subsystems:
+ subsystems[s] = {}
+ if not t.sname in subsystems[s]:
+ subsystems[s][t.sname] = []
+ subsystems[s][t.sname].append(t2.sname)
+
+ for s in subsystems:
+ if len(subsystems[s]) > 1 and Options.options.SHOW_DUPLICATES:
+ Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))
+ for tname in subsystems[s]:
+ if len(subsystems[s][tname]) > 1:
+ raise Errors.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
+
+ return True
+
+def check_group_ordering(bld, tgt_list):
+ '''see if we have any dependencies that violate the group ordering
+
+ It is an error for a target to depend on a target from a later
+ build group
+ '''
+
+ def group_name(g):
+ tm = bld.task_manager
+ return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]
+
+ for g in bld.task_manager.groups:
+ gname = group_name(g)
+ for t in g.tasks_gen:
+ t.samba_group = gname
+
+ grp_map = {}
+ idx = 0
+ for g in bld.task_manager.groups:
+ name = group_name(g)
+ grp_map[name] = idx
+ idx += 1
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ ret = True
+ for t in tgt_list:
+ tdeps = getattr(t, 'add_objects', []) + getattr(t, 'uselib_local', [])
+ for d in tdeps:
+ t2 = bld.get_tgen_by_name(d)
+ if t2 is None:
+ continue
+ map1 = grp_map[t.samba_group]
+ map2 = grp_map[t2.samba_group]
+
+ if map2 > map1:
+ Logs.error("Target %r in build group %r depends on target %r from later build group %r" % (
+ t.sname, t.samba_group, t2.sname, t2.samba_group))
+ ret = False
+
+ return ret
+Build.BuildContext.check_group_ordering = check_group_ordering
+
+def show_final_deps(bld, tgt_list):
+ '''show the final dependencies for all targets'''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ for t in tgt_list:
+ if not targets[t.sname] in ['LIBRARY', 'PLUGIN', 'BINARY', 'PYTHON', 'SUBSYSTEM', 'BUILTIN']:
+ continue
+ debug('deps: final dependencies for target %s: uselib=%s uselib_local=%s add_objects=%s',
+ t.sname, t.uselib, getattr(t, 'uselib_local', []), getattr(t, 'add_objects', []))
+
+
+def add_samba_attributes(bld, tgt_list):
+ '''ensure a target has the required samba attributes'''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ for t in tgt_list:
+ if t.name != '':
+ t.sname = t.name
+ else:
+ t.sname = t.target
+ t.samba_type = targets[t.sname]
+ t.samba_abspath = t.path.abspath(bld.env)
+ t.samba_deps_extended = t.samba_deps[:]
+ t.samba_includes_extended = TO_LIST(t.samba_includes)[:]
+ t.cflags = getattr(t, 'samba_cflags', '')
+
+def replace_builtin_subsystem_deps(bld, tgt_list):
+ '''replace dependencies based on builtin subsystems/libraries
+
+ '''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ # If either the target or the dependency require builtin linking
+ # we should replace the dependency
+ for t in tgt_list:
+ t_require_builtin_deps = getattr(t, 'samba_require_builtin_deps', False)
+ if t_require_builtin_deps:
+ debug("deps: target %s: requires builtin dependencies..." % (t.sname))
+ else:
+ debug("deps: target %s: does not require builtin dependencies..." % (t.sname))
+
+ replacing = {}
+
+ for dep in t.samba_deps_extended:
+ bld.ASSERT(dep in targets, "target %s: dependency target %s not declared" % (t.sname, dep))
+ dtype = targets[dep]
+ bld.ASSERT(dtype != 'BUILTIN', "target %s: dependency target %s is BUILTIN" % (t.sname, dep))
+ bld.ASSERT(dtype != 'PLUGIN', "target %s: dependency target %s is PLUGIN" % (t.sname, dep))
+ if dtype not in ['SUBSYSTEM', 'LIBRARY']:
+ debug("deps: target %s: keep %s dependency %s" % (t.sname, dtype, dep))
+ continue
+ dt = bld.get_tgen_by_name(dep)
+ bld.ASSERT(dt is not None, "target %s: dependency target %s not found by name" % (t.sname, dep))
+ dt_require_builtin_deps = getattr(dt, 'samba_require_builtin_deps', False)
+ if not dt_require_builtin_deps and not t_require_builtin_deps:
+ # both target and dependency don't require builtin linking
+ continue
+ sdt = getattr(dt, 'samba_builtin_subsystem', None)
+ if not t_require_builtin_deps:
+ if sdt is None:
+ debug("deps: target %s: dependency %s requires builtin deps only" % (t.sname, dep))
+ continue
+ debug("deps: target %s: dependency %s requires builtin linking" % (t.sname, dep))
+ bld.ASSERT(sdt is not None, "target %s: dependency target %s is missing samba_builtin_subsystem" % (t.sname, dep))
+ sdep = sdt.sname
+ bld.ASSERT(sdep in targets, "target %s: builtin dependency target %s (from %s) not declared" % (t.sname, sdep, dep))
+ sdt = targets[sdep]
+ bld.ASSERT(sdt == 'BUILTIN', "target %s: builtin dependency target %s (from %s) is not BUILTIN" % (t.sname, sdep, dep))
+ replacing[dep] = sdep
+
+ for i in range(len(t.samba_deps_extended)):
+ dep = t.samba_deps_extended[i]
+ if dep in replacing:
+ sdep = replacing[dep]
+ debug("deps: target %s: replacing dependency %s with builtin subsystem %s" % (t.sname, dep, sdep))
+ t.samba_deps_extended[i] = sdep
+
+def replace_grouping_libraries(bld, tgt_list):
+ '''replace dependencies based on grouping libraries
+
+ If a library is marked as a grouping library, then any target that
+ depends on a subsystem that is part of that grouping library gets
+ that dependency replaced with a dependency on the grouping library
+ '''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ grouping = {}
+
+ # find our list of grouping libraries, mapped from the subsystems they depend on
+ for t in tgt_list:
+ if not getattr(t, 'grouping_library', False):
+ continue
+ for dep in t.samba_deps_extended:
+ bld.ASSERT(dep in targets, "grouping library target %s not declared in %s" % (dep, t.sname))
+ if targets[dep] == 'SUBSYSTEM':
+ grouping[dep] = t.sname
+
+ # now replace any dependencies on elements of grouping libraries
+ for t in tgt_list:
+ for i in range(len(t.samba_deps_extended)):
+ dep = t.samba_deps_extended[i]
+ if dep in grouping:
+ if t.sname != grouping[dep]:
+ debug("deps: target %s: replacing dependency %s with grouping library %s" % (t.sname, dep, grouping[dep]))
+ t.samba_deps_extended[i] = grouping[dep]
+
+
+
+def build_direct_deps(bld, tgt_list):
+ '''build the direct_objects and direct_libs sets for each target'''
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+ syslib_deps = LOCAL_CACHE(bld, 'SYSLIB_DEPS')
+
+ global_deps = bld.env.GLOBAL_DEPENDENCIES
+ global_deps_exclude = set()
+ for dep in global_deps:
+ t = bld.get_tgen_by_name(dep)
+ for d in t.samba_deps:
+ # prevent loops from the global dependencies list
+ global_deps_exclude.add(d)
+ global_deps_exclude.add(d + '.objlist')
+
+ for t in tgt_list:
+ t.direct_objects = set()
+ t.direct_libs = set()
+ t.direct_syslibs = set()
+ deps = t.samba_deps_extended[:]
+ if getattr(t, 'samba_use_global_deps', False) and not t.sname in global_deps_exclude:
+ deps.extend(global_deps)
+ for d in deps:
+ if d == t.sname: continue
+ if not d in targets:
+ Logs.error("Unknown dependency '%s' in '%s'" % (d, t.sname))
+ sys.exit(1)
+ if targets[d] in [ 'EMPTY', 'DISABLED' ]:
+ continue
+ if targets[d] == 'PYTHON' and targets[t.sname] != 'PYTHON' and t.sname.find('.objlist') == -1:
+ # this check should be more restrictive, but for now we have pidl-generated python
+ # code that directly depends on other python modules
+ Logs.error('ERROR: Target %s has dependency on python module %s' % (t.sname, d))
+ sys.exit(1)
+ if targets[d] == 'SYSLIB':
+ t.direct_syslibs.add(d)
+ if d in syslib_deps:
+ for implied in TO_LIST(syslib_deps[d]):
+ if targets[implied] == 'SUBSYSTEM':
+ it = bld.get_tgen_by_name(implied)
+ sit = getattr(it, 'samba_builtin_subsystem', None)
+ if sit:
+ implied = sit.sname
+ if targets[implied] == 'BUILTIN':
+ t.direct_objects.add(implied)
+ elif targets[implied] == 'SYSLIB':
+ t.direct_syslibs.add(implied)
+ elif targets[implied] in ['LIBRARY', 'MODULE']:
+ t.direct_libs.add(implied)
+ else:
+ Logs.error('Implied dependency %s in %s is of type %s' % (
+ implied, t.sname, targets[implied]))
+ sys.exit(1)
+ continue
+ t2 = bld.get_tgen_by_name(d)
+ if t2 is None:
+ Logs.error("no task %s of type %s in %s" % (d, targets[d], t.sname))
+ sys.exit(1)
+ if t2.samba_type in [ 'LIBRARY', 'MODULE' ]:
+ t.direct_libs.add(d)
+ elif t2.samba_type in [ 'SUBSYSTEM', 'BUILTIN', 'ASN1', 'PYTHON' ]:
+ t.direct_objects.add(d)
+ elif t2.samba_type in [ 'PLUGIN' ]:
+ Logs.error('Implicit dependency %s in %s is of type %s' % (
+ d, t.sname, t2.samba_type))
+ sys.exit(1)
+
+ debug('deps: built direct dependencies')
+
+
+def dependency_loop(loops, t, target):
+ '''add a dependency loop to the loops dictionary'''
+ if t.sname == target:
+ return
+ if not target in loops:
+ loops[target] = set()
+ if not t.sname in loops[target]:
+ loops[target].add(t.sname)
+
+
+def indirect_libs(bld, t, chain, loops):
+ '''recursively calculate the indirect library dependencies for a target
+
+ An indirect library is a library that results from a dependency on
+ a subsystem
+ '''
+
+ ret = getattr(t, 'indirect_libs', None)
+ if ret is not None:
+ return ret
+
+ ret = set()
+ for obj in t.direct_objects:
+ if obj in chain:
+ dependency_loop(loops, t, obj)
+ continue
+ chain.add(obj)
+ t2 = bld.get_tgen_by_name(obj)
+ r2 = indirect_libs(bld, t2, chain, loops)
+ chain.remove(obj)
+ ret = ret.union(t2.direct_libs)
+ ret = ret.union(r2)
+
+ for obj in indirect_objects(bld, t, set(), loops):
+ if obj in chain:
+ dependency_loop(loops, t, obj)
+ continue
+ chain.add(obj)
+ t2 = bld.get_tgen_by_name(obj)
+ r2 = indirect_libs(bld, t2, chain, loops)
+ chain.remove(obj)
+ ret = ret.union(t2.direct_libs)
+ ret = ret.union(r2)
+
+ t.indirect_libs = ret
+
+ return ret
+
+
+def indirect_objects(bld, t, chain, loops):
+ '''recursively calculate the indirect object dependencies for a target
+
+ indirect objects are the set of objects from expanding the
+ subsystem dependencies
+ '''
+
+ ret = getattr(t, 'indirect_objects', None)
+ if ret is not None: return ret
+
+ ret = set()
+ for lib in t.direct_objects:
+ if lib in chain:
+ dependency_loop(loops, t, lib)
+ continue
+ chain.add(lib)
+ t2 = bld.get_tgen_by_name(lib)
+ r2 = indirect_objects(bld, t2, chain, loops)
+ chain.remove(lib)
+ ret = ret.union(t2.direct_objects)
+ ret = ret.union(r2)
+
+ t.indirect_objects = ret
+ return ret
+
+
+def extended_objects(bld, t, chain):
+ '''recursively calculate the extended object dependencies for a target
+
+ extended objects are the union of:
+ - direct objects
+ - indirect objects
+ - direct and indirect objects of all direct and indirect libraries
+ '''
+
+ ret = getattr(t, 'extended_objects', None)
+ if ret is not None: return ret
+
+ ret = set()
+ ret = ret.union(t.final_objects)
+
+ for lib in t.final_libs:
+ if lib in chain:
+ continue
+ t2 = bld.get_tgen_by_name(lib)
+ chain.add(lib)
+ r2 = extended_objects(bld, t2, chain)
+ chain.remove(lib)
+ ret = ret.union(t2.final_objects)
+ ret = ret.union(r2)
+
+ t.extended_objects = ret
+ return ret
+
+
+def includes_objects(bld, t, chain, inc_loops):
+ '''recursively calculate the includes object dependencies for a target
+
+ includes dependencies come from either library or object dependencies
+ '''
+ ret = getattr(t, 'includes_objects', None)
+ if ret is not None:
+ return ret
+
+ ret = t.direct_objects.copy()
+ ret = ret.union(t.direct_libs)
+
+ for obj in t.direct_objects:
+ if obj in chain:
+ dependency_loop(inc_loops, t, obj)
+ continue
+ chain.add(obj)
+ t2 = bld.get_tgen_by_name(obj)
+ r2 = includes_objects(bld, t2, chain, inc_loops)
+ chain.remove(obj)
+ ret = ret.union(t2.direct_objects)
+ ret = ret.union(r2)
+
+ for lib in t.direct_libs:
+ if lib in chain:
+ dependency_loop(inc_loops, t, lib)
+ continue
+ chain.add(lib)
+ t2 = bld.get_tgen_by_name(lib)
+ if t2 is None:
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+ Logs.error('Target %s of type %s not found in direct_libs for %s' % (
+ lib, targets[lib], t.sname))
+ sys.exit(1)
+ r2 = includes_objects(bld, t2, chain, inc_loops)
+ chain.remove(lib)
+ ret = ret.union(t2.direct_objects)
+ ret = ret.union(r2)
+
+ t.includes_objects = ret
+ return ret
+
+
+def break_dependency_loops(bld, tgt_list):
+ '''find and break dependency loops'''
+ loops = {}
+ inc_loops = {}
+
+ # build up the list of loops
+ for t in tgt_list:
+ indirect_objects(bld, t, set(), loops)
+ indirect_libs(bld, t, set(), loops)
+ includes_objects(bld, t, set(), inc_loops)
+
+ # break the loops
+ for t in tgt_list:
+ if t.sname in loops:
+ for attr in ['direct_objects', 'indirect_objects', 'direct_libs', 'indirect_libs']:
+ objs = getattr(t, attr, set())
+ setattr(t, attr, objs.difference(loops[t.sname]))
+
+ for loop in loops:
+ debug('deps: Found dependency loops for target %s : %s', loop, loops[loop])
+
+ for loop in inc_loops:
+ debug('deps: Found include loops for target %s : %s', loop, inc_loops[loop])
+
+ # expand the loops mapping by one level
+ for loop in loops.copy():
+ for tgt in loops[loop]:
+ if tgt in loops:
+ loops[loop] = loops[loop].union(loops[tgt])
+
+ for loop in inc_loops.copy():
+ for tgt in inc_loops[loop]:
+ if tgt in inc_loops:
+ inc_loops[loop] = inc_loops[loop].union(inc_loops[tgt])
+
+
+ # expand indirect subsystem and library loops
+ for loop in loops.copy():
+ t = bld.get_tgen_by_name(loop)
+ if t.samba_type in ['SUBSYSTEM', 'BUILTIN']:
+ loops[loop] = loops[loop].union(t.indirect_objects)
+ loops[loop] = loops[loop].union(t.direct_objects)
+ if t.samba_type in ['LIBRARY', 'PLUGIN', 'PYTHON']:
+ loops[loop] = loops[loop].union(t.indirect_libs)
+ loops[loop] = loops[loop].union(t.direct_libs)
+ if loop in loops[loop]:
+ loops[loop].remove(loop)
+
+ # expand indirect includes loops
+ for loop in inc_loops.copy():
+ t = bld.get_tgen_by_name(loop)
+ inc_loops[loop] = inc_loops[loop].union(t.includes_objects)
+ if loop in inc_loops[loop]:
+ inc_loops[loop].remove(loop)
+
+ # add in the replacement dependencies
+ for t in tgt_list:
+ for loop in loops:
+ for attr in ['indirect_objects', 'indirect_libs']:
+ objs = getattr(t, attr, set())
+ if loop in objs:
+ diff = loops[loop].difference(objs)
+ if t.sname in diff:
+ diff.remove(t.sname)
+ if diff:
+ debug('deps: Expanded target %s of type %s from loop %s by %s', t.sname, t.samba_type, loop, diff)
+ objs = objs.union(diff)
+ setattr(t, attr, objs)
+
+ for loop in inc_loops:
+ objs = getattr(t, 'includes_objects', set())
+ if loop in objs:
+ diff = inc_loops[loop].difference(objs)
+ if t.sname in diff:
+ diff.remove(t.sname)
+ if diff:
+ debug('deps: Expanded target %s includes of type %s from loop %s by %s', t.sname, t.samba_type, loop, diff)
+ objs = objs.union(diff)
+ setattr(t, 'includes_objects', objs)
+
+
+def reduce_objects(bld, tgt_list):
+ '''reduce objects by looking for indirect object dependencies'''
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ rely_on = {}
+
+ for t in tgt_list:
+ t.extended_objects = None
+
+ changed = False
+
+ for type in ['BINARY', 'PYTHON', 'LIBRARY', 'PLUGIN']:
+ for t in tgt_list:
+ if t.samba_type != type: continue
+ # if we will indirectly link to a target then we don't need it
+ new = t.final_objects.copy()
+ for l in t.final_libs:
+ t2 = bld.get_tgen_by_name(l)
+ t2_obj = extended_objects(bld, t2, set())
+ dup = new.intersection(t2_obj)
+ if t.sname in rely_on:
+ dup = dup.difference(rely_on[t.sname])
+ if dup:
+ # Do not remove duplicates of BUILTINS
+ for d in iter(dup.copy()):
+ dtype = targets[d]
+ if dtype == 'BUILTIN':
+ debug('deps: BUILTIN SKIP: removing dups from %s of type %s: %s also in %s %s',
+ t.sname, t.samba_type, d, t2.samba_type, l)
+ dup.remove(d)
+ if len(dup) == 0:
+ continue
+
+ debug('deps: removing dups from %s of type %s: %s also in %s %s',
+ t.sname, t.samba_type, dup, t2.samba_type, l)
+ new = new.difference(dup)
+ changed = True
+ if not l in rely_on:
+ rely_on[l] = set()
+ rely_on[l] = rely_on[l].union(dup)
+ for n in iter(new.copy()):
+ # if we got the builtin version as well
+ # as the native one, we keep using the
+ # builtin one and remove the rest.
+ # Otherwise our check_duplicate_sources()
+ # checks would trigger!
+ if n.endswith('.builtin.objlist'):
+ unused = n.replace('.builtin.objlist', '.objlist')
+ if unused in new:
+ new.remove(unused)
+ unused = n.replace('.builtin.objlist', '')
+ if unused in new:
+ new.remove(unused)
+ t.final_objects = new
+
+ if not changed:
+ return False
+
+ # add back in any objects that were relied upon by the reduction rules
+ for r in rely_on:
+ t = bld.get_tgen_by_name(r)
+ t.final_objects = t.final_objects.union(rely_on[r])
+
+ return True
+
+
+def show_library_loop(bld, lib1, lib2, path, seen):
+ '''show the detailed path of a library loop between lib1 and lib2'''
+
+ t = bld.get_tgen_by_name(lib1)
+ if not lib2 in getattr(t, 'final_libs', set()):
+ return
+
+ for d in t.samba_deps_extended:
+ if d in seen:
+ continue
+ seen.add(d)
+ path2 = path + '=>' + d
+ if d == lib2:
+ Logs.warn('library loop path: ' + path2)
+ return
+ show_library_loop(bld, d, lib2, path2, seen)
+ seen.remove(d)
+
+
def calculate_final_deps(bld, tgt_list, loops):
    '''calculate the final library and object dependencies

    Fills in t.final_libs, t.final_objects and t.final_syslibs for every
    target generator in tgt_list, breaking (or reporting) library loops
    along the way.  'loops' is a dict recording deliberately broken
    circular dependencies, updated in place.
    '''
    for t in tgt_list:
        # start with the maximum possible list
        t.final_libs = t.direct_libs.union(indirect_libs(bld, t, set(), loops))
        t.final_objects = t.direct_objects.union(indirect_objects(bld, t, set(), loops))

    for t in tgt_list:
        # don't depend on ourselves
        if t.sname in t.final_libs:
            t.final_libs.remove(t.sname)
        if t.sname in t.final_objects:
            t.final_objects.remove(t.sname)

    # handle any non-shared binaries: they link every library's object
    # list directly instead of depending on shared libs
    for t in tgt_list:
        if t.samba_type == 'BINARY' and bld.NONSHARED_BINARY(t.sname):
            subsystem_list = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
            targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

            # replace lib deps with objlist deps
            for l in t.final_libs:
                objname = l + '.objlist'
                t2 = bld.get_tgen_by_name(objname)
                if t2 is None:
                    Logs.error('ERROR: subsystem %s not found' % objname)
                    sys.exit(1)
                t.final_objects.add(objname)
                t.final_objects = t.final_objects.union(extended_objects(bld, t2, set()))
                if l in subsystem_list:
                    # its a subsystem - we also need the contents of any modules
                    for d in subsystem_list[l]:
                        module_name = d['TARGET']
                        if targets[module_name] == 'LIBRARY':
                            objname = module_name + '.objlist'
                        elif targets[module_name] == 'SUBSYSTEM':
                            objname = module_name
                        else:
                            continue
                        t2 = bld.get_tgen_by_name(objname)
                        if t2 is None:
                            Logs.error('ERROR: subsystem %s not found' % objname)
                            sys.exit(1)
                        t.final_objects.add(objname)
                        t.final_objects = t.final_objects.union(extended_objects(bld, t2, set()))
            # all libraries are now folded into the object list
            t.final_libs = set()

    # find any library loops
    for t in tgt_list:
        if t.samba_type in ['LIBRARY', 'PYTHON']:
            for l in t.final_libs.copy():
                t2 = bld.get_tgen_by_name(l)
                if t.sname in t2.final_libs:
                    if getattr(bld.env, "ALLOW_CIRCULAR_LIB_DEPENDENCIES", False):
                        # we could break this in either direction. If one of the libraries
                        # has a version number, and will this be distributed publicly, then
                        # we should make it the lower level library in the DAG
                        Logs.warn('deps: removing library loop %s from %s' % (t.sname, t2.sname))
                        dependency_loop(loops, t, t2.sname)
                        t2.final_libs.remove(t.sname)
                    else:
                        Logs.error('ERROR: circular library dependency between %s and %s'
                            % (t.sname, t2.sname))
                        show_library_loop(bld, t.sname, t2.sname, t.sname, set())
                        show_library_loop(bld, t2.sname, t.sname, t2.sname, set())
                        sys.exit(1)

    for loop in loops:
        debug('deps: Found dependency loops for target %s : %s', loop, loops[loop])

    # we now need to make corrections for any library loops we broke up
    # any target that depended on the target of the loop and doesn't
    # depend on the source of the loop needs to get the loop source added
    # NOTE(review): 'BINARY' appears twice in this list, so binaries are
    # processed twice; harmless but redundant
    for type in ['BINARY','PYTHON','LIBRARY','PLUGIN','BINARY']:
        for t in tgt_list:
            if t.samba_type != type: continue
            for loop in loops:
                if loop in t.final_libs:
                    diff = loops[loop].difference(t.final_libs)
                    if t.sname in diff:
                        diff.remove(t.sname)
                    # NOTE(review): duplicated removal below is a no-op the
                    # second time; looks like a copy/paste leftover
                    if t.sname in diff:
                        diff.remove(t.sname)
                    # make sure we don't recreate the loop again!
                    for d in diff.copy():
                        t2 = bld.get_tgen_by_name(d)
                        if t2.samba_type == 'LIBRARY':
                            if t.sname in t2.final_libs:
                                debug('deps: removing expansion %s from %s', d, t.sname)
                                diff.remove(d)
                    if diff:
                        debug('deps: Expanded target %s by loop %s libraries (loop %s) %s', t.sname, loop,
                              loops[loop], diff)
                        t.final_libs = t.final_libs.union(diff)

    # remove objects that are also available in linked libs
    # (iterate to a fixed point, with a safety cap)
    count = 0
    while reduce_objects(bld, tgt_list):
        count += 1
        if count > 100:
            Logs.warn("WARNING: Unable to remove all inter-target object duplicates")
            break
    debug('deps: Object reduction took %u iterations', count)

    # add in any syslib dependencies
    for t in tgt_list:
        if not t.samba_type in ['BINARY','PYTHON','LIBRARY','PLUGIN','SUBSYSTEM','BUILTIN']:
            continue
        syslibs = set()
        for d in t.final_objects:
            t2 = bld.get_tgen_by_name(d)
            syslibs = syslibs.union(t2.direct_syslibs)
        # this adds the indirect syslibs as well, which may not be needed
        # depending on the linker flags
        for d in t.final_libs:
            t2 = bld.get_tgen_by_name(d)
            syslibs = syslibs.union(t2.direct_syslibs)
        t.final_syslibs = syslibs


    # find any unresolved library loops
    lib_loop_error = False
    for t in tgt_list:
        if t.samba_type in ['LIBRARY', 'PLUGIN', 'PYTHON']:
            for l in t.final_libs.copy():
                t2 = bld.get_tgen_by_name(l)
                if t.sname in t2.final_libs:
                    Logs.error('ERROR: Unresolved library loop %s from %s' % (t.sname, t2.sname))
                    lib_loop_error = True
    if lib_loop_error:
        sys.exit(1)

    debug('deps: removed duplicate dependencies')
+
+
def show_dependencies(bld, target, seen):
    '''recursively print the direct dependencies of target'''

    if target in seen:
        return

    tgen = bld.get_tgen_by_name(target)
    if tgen is None:
        Logs.error("ERROR: Unable to find target '%s'" % target)
        sys.exit(1)

    # report each class of direct dependency for this target
    for label, attr in (('OBJECTS', 'direct_objects'),
                        ('LIBS', 'direct_libs'),
                        ('SYSLIBS', 'direct_syslibs')):
        Logs.info('%s(%s): %s' % (target, label, getattr(tgen, attr)))

    seen.add(target)

    # recurse into the object-level dependencies only
    for dep in tgen.direct_objects:
        show_dependencies(bld, dep, seen)
+
+
def show_object_duplicates(bld, tgt_list):
    '''show a list of object files that are included in more than
    one library or binary

    Also reports, sorted by count, the number of indirect objects each
    target depends on.  Purely informational (triggered by
    --show-duplicates); makes no changes to the build graph.
    '''

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    used_by = {}

    Logs.info("showing duplicate objects")

    for t in tgt_list:
        # only libraries and python modules can meaningfully duplicate objects
        if not targets[t.sname] in [ 'LIBRARY', 'PYTHON' ]:
            continue
        for n in getattr(t, 'final_objects', set()):
            # (the old per-object bld.get_tgen_by_name() lookup here was
            # unused and has been removed)
            if not n in used_by:
                used_by[n] = set()
            used_by[n].add(t.sname)

    for n in used_by:
        if len(used_by[n]) > 1:
            Logs.info("target '%s' is used by %s" % (n, used_by[n]))

    Logs.info("showing indirect dependency counts (sorted by count)")

    def indirect_count(t):
        # sort key: how many indirect objects a target pulls in
        return len(t.indirect_objects)

    sorted_list = sorted(tgt_list, key=indirect_count, reverse=True)
    for t in sorted_list:
        if len(t.indirect_objects) > 1:
            Logs.info("%s depends on %u indirect objects" % (t.sname, len(t.indirect_objects)))
+
+
######################################################################
# this provides a way to save our dependency calculations between runs
# bump savedeps_version whenever the meaning of the saved data changes
savedeps_version = 3
# target attributes that feed into the dependency calculation
savedeps_inputs  = ['samba_deps', 'samba_includes', 'local_include', 'local_include_first', 'samba_cflags',
                    'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols',
                    'use_global_deps', 'global_include' ]
# target attributes produced by the calculation (restored on cache hit)
savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes',
                    'cflags', 'ldflags', 'samba_deps_extended', 'final_libs']
# per-target environment variables that are saved/restored
savedeps_outenv  = ['INC_PATHS']
# build-environment variables that invalidate the cache when changed
savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ]
# LOCAL_CACHE dictionaries that must match for the cache to be valid
savedeps_caches  = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS']
# source files whose mtime invalidates the cache
savedeps_files   = ['buildtools/wafsamba/samba_deps.py']
+
def save_samba_deps(bld, tgt_list):
    '''save the dependency calculations between builds, to make
    further builds faster'''

    def collect_attrs(obj, attrs):
        # gather the attributes from 'attrs' that are set (non-None) on obj
        found = {}
        for a in attrs:
            val = getattr(obj, a, None)
            if val is not None:
                found[a] = val
        return found

    denv = ConfigSet.ConfigSet()
    denv.version = savedeps_version
    denv.savedeps_inputs = savedeps_inputs
    denv.savedeps_outputs = savedeps_outputs

    # record the mtimes of the critical build scripts
    denv.files = {f: os.stat(os.path.join(bld.srcnode.abspath(), f)).st_mtime
                  for f in savedeps_files}

    # snapshot the local caches and environment variables we depend on
    denv.caches = {c: LOCAL_CACHE(bld, c) for c in savedeps_caches}
    denv.envvar = {e: bld.env[e] for e in savedeps_envvars}

    denv.input = {}
    denv.output = {}
    denv.outenv = {}

    for t in tgt_list:
        # save all the input attributes for each target
        tdeps = collect_attrs(t, savedeps_inputs)
        if tdeps:
            denv.input[t.sname] = tdeps

        # save all the output attributes for each target
        tdeps = collect_attrs(t, savedeps_outputs)
        if tdeps:
            denv.output[t.sname] = tdeps

        # save the relevant per-target environment variables
        tdeps = {a: t.env[a] for a in savedeps_outenv if a in t.env}
        if tdeps:
            denv.outenv[t.sname] = tdeps

    denv.store_fast(os.path.join(bld.cache_dir, "sambadeps"))
+
+
+
def load_samba_deps(bld, tgt_list):
    '''load a previous set of build dependencies if possible

    Returns True (and applies the cached output attributes to the
    targets) only when every saved input still matches the current
    build; returns False on any mismatch so the caller recalculates.
    '''
    depsfile = os.path.join(bld.cache_dir, "sambadeps")
    denv = ConfigSet.ConfigSet()
    try:
        debug('deps: checking saved dependencies')
        denv.load_fast(depsfile)
        if (denv.version != savedeps_version or
            denv.savedeps_inputs != savedeps_inputs or
            denv.savedeps_outputs != savedeps_outputs):
            return False
    except Exception:
        # missing or unreadable cache file - just recalculate
        return False

    # check if critical files have changed
    for f in savedeps_files:
        if f not in denv.files:
            return False
        if denv.files[f] != os.stat(os.path.join(bld.srcnode.abspath(), f)).st_mtime:
            return False

    # check if caches are the same
    for c in savedeps_caches:
        if c not in denv.caches or denv.caches[c] != LOCAL_CACHE(bld, c):
            return False

    # check if the environment variables are the same
    for e in savedeps_envvars:
        if e not in denv.envvar or denv.envvar[e] != bld.env[e]:
            return False

    # check inputs are the same
    for t in tgt_list:
        tdeps = {}
        for attr in savedeps_inputs:
            v = getattr(t, attr, None)
            if v is not None:
                tdeps[attr] = v
        if t.sname in denv.input:
            olddeps = denv.input[t.sname]
        else:
            olddeps = {}
        if tdeps != olddeps:
            #print '%s: \ntdeps=%s \nodeps=%s' % (t.sname, tdeps, olddeps)
            return False

    # put outputs in place
    for t in tgt_list:
        if not t.sname in denv.output: continue
        tdeps = denv.output[t.sname]
        for a in tdeps:
            setattr(t, a, tdeps[a])

    # put output env vars in place
    for t in tgt_list:
        if not t.sname in denv.outenv: continue
        tdeps = denv.outenv[t.sname]
        for a in tdeps:
            t.env[a] = tdeps[a]

    debug('deps: loaded saved dependencies')
    return True
+
+
def generate_clangdb(bld):
    '''write a clang compile_commands.json covering every C/C++ task that ran'''

    # the task classes we consider compilation tasks
    task_classes = tuple(cls for cls in
                         (Task.classes.get(x) for x in ('c', 'cxx'))
                         if cls)

    compile_tasks = []
    for group in bld.groups:
        for tg in group:
            if isinstance(tg, Task.Task):
                candidates = [tg]
            else:
                candidates = tg.tasks
            for tsk in candidates:
                # only tasks that actually executed have a last_cmd
                if not hasattr(tsk, 'last_cmd'):
                    continue
                if isinstance(tsk, task_classes):
                    compile_tasks.append(tsk)

    if not compile_tasks:
        return

    database_file = bld.bldnode.make_node('compile_commands.json')
    Logs.info('Build commands will be stored in %s',
              database_file.path_from(bld.path))

    # merge with any existing database so partial rebuilds accumulate
    try:
        root = database_file.read_json()
    except IOError:
        root = []
    clang_db = dict((entry['file'], entry) for entry in root)

    for tsk in compile_tasks:
        filename = tsk.inputs[0].path_from(tsk.get_cwd())
        clang_db[filename] = {
            "directory": tsk.get_cwd().abspath(),
            "arguments": tsk.last_cmd,
            "file": filename,
        }

    database_file.write_json(list(clang_db.values()))
+
+
def check_project_rules(bld):
    '''check the project rules - ensuring the targets are sane

    This is the main dependency-resolution driver, run as a pre-build
    function.  It either restores a cached calculation or runs the full
    pipeline (builtin replacement, subsystem expansion, direct deps,
    loop breaking, final deps, attribute generation, sanity checks),
    then saves the result for the next run.
    '''

    loops = {}
    inc_loops = {}

    tgt_list = get_tgt_list(bld)

    add_samba_attributes(bld, tgt_list)

    # --showdeps / --show-duplicates need the full calculation even if
    # a valid cache exists
    force_project_rules = (Options.options.SHOWDEPS or
                           Options.options.SHOW_DUPLICATES)

    if not force_project_rules and load_samba_deps(bld, tgt_list):
        return

    timer = Utils.Timer()

    bld.new_rules = True
    Logs.info("Checking project rules ...")

    debug('deps: project rules checking started')

    # the stages below are order-dependent; each builds on the previous
    replace_builtin_subsystem_deps(bld, tgt_list)

    debug("deps: replace_builtin_subsystem_deps: %s" % str(timer))

    expand_subsystem_deps(bld)

    debug("deps: expand_subsystem_deps: %s" % str(timer))

    replace_grouping_libraries(bld, tgt_list)

    debug("deps: replace_grouping_libraries: %s" % str(timer))

    build_direct_deps(bld, tgt_list)

    debug("deps: build_direct_deps: %s" % str(timer))

    break_dependency_loops(bld, tgt_list)

    debug("deps: break_dependency_loops: %s" % str(timer))

    if Options.options.SHOWDEPS:
            show_dependencies(bld, Options.options.SHOWDEPS, set())

    calculate_final_deps(bld, tgt_list, loops)

    debug("deps: calculate_final_deps: %s" % str(timer))

    if Options.options.SHOW_DUPLICATES:
            show_object_duplicates(bld, tgt_list)

    # run the various attribute generators
    for f in [ build_dependencies, build_includes, add_init_functions ]:
        debug('deps: project rules checking %s', f)
        for t in tgt_list: f(t)
        debug("deps: %s: %s" % (f, str(timer)))

    debug('deps: project rules stage1 completed')

    if not check_duplicate_sources(bld, tgt_list):
        Logs.error("Duplicate sources present - aborting")
        sys.exit(1)

    debug("deps: check_duplicate_sources: %s" % str(timer))

    if not bld.check_group_ordering(tgt_list):
        Logs.error("Bad group ordering - aborting")
        sys.exit(1)

    debug("deps: check_group_ordering: %s" % str(timer))

    show_final_deps(bld, tgt_list)

    debug("deps: show_final_deps: %s" % str(timer))

    debug('deps: project rules checking completed - %u targets checked',
          len(tgt_list))

    # don't refresh the cache during 'install' runs
    if not bld.is_install:
        save_samba_deps(bld, tgt_list)

    debug("deps: save_samba_deps: %s" % str(timer))

    Logs.info("Project rules pass")

    if bld.cmd == 'build':
        # keep the exact compiler command lines so generate_clangdb can
        # emit a compile_commands.json after the build
        Task.Task.keep_last_cmd = True
        bld.add_post_fun(generate_clangdb)
+
+
def CHECK_PROJECT_RULES(bld):
    '''enable checking of project targets for sanity'''
    # register the pre-build hook at most once per build context
    if not bld.env.added_project_rules:
        bld.env.added_project_rules = True
        bld.add_pre_fun(check_project_rules)
Build.BuildContext.CHECK_PROJECT_RULES = CHECK_PROJECT_RULES
+
+
diff --git a/buildtools/wafsamba/samba_dist.py b/buildtools/wafsamba/samba_dist.py
new file mode 100644
index 0000000..0218cad
--- /dev/null
+++ b/buildtools/wafsamba/samba_dist.py
@@ -0,0 +1,280 @@
+# customised version of 'waf dist' for Samba tools
+# uses git ls-files to get file lists
+
+import os, sys, tarfile
+from waflib import Utils, Scripting, Logs, Options
+from waflib.Configure import conf
+from samba_utils import get_string
+from waflib import Context
+
# module-level state set via the DIST_* helpers below and consumed by dist()
dist_dirs = None       # space-separated 'dir[:destdir]' entries to package
dist_files = None      # extra 'file[:destfile]' entries to include
dist_blacklist = ""    # space-separated files/dirs to exclude
dist_archive = None    # name of the archive produced by the last dist() run
+
class Dist(Context.Context):
    '''waf command context that routes 'waf dist' to the project's
    dist() implementation defined in this module'''
    # TODO remove
    cmd = 'dist'
    fun = 'dist'
    def execute(self):
        # bypass waf's default Scripting.dist entirely
        Context.g_module.dist()
+
class DistCheck(Scripting.DistCheck):
    '''distcheck: build the dist tarball, then configure, build, install
    and uninstall it in a scratch directory to verify it is complete'''
    fun = 'distcheck'
    cmd = 'distcheck'
    def execute(self):
        Options.options.distcheck_args = ''
        if Context.g_module.distcheck is Scripting.distcheck:
            # default
            Context.g_module.distcheck(self)
        else:
            # the project supplied its own distcheck hook
            Context.g_module.distcheck()
        Context.g_module.dist()
        self.check()
    def get_arch_name(self):
        # report the archive created by our dist() (stored in a module global)
        global dist_archive
        return dist_archive
    def make_distcheck_cmd(self, tmpdir):
        # command used to exercise the unpacked tarball
        waf = os.path.abspath(sys.argv[0])
        return [sys.executable, waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir]
+
def add_symlink(tar, fname, abspath, basedir):
    '''handle symlinks to directories that may move during packaging

    Returns True when abspath is a symlink and has been added to the
    tarball (possibly with a rewritten link target), False otherwise so
    the caller falls back to regular-file handling.
    '''
    if not os.path.islink(abspath):
        return False
    tinfo = tar.gettarinfo(name=abspath, arcname=fname)
    tgt = os.readlink(abspath)

    if dist_dirs:
        # we need to find the target relative to the main directory
        # this is here to cope with symlinks into the buildtools
        # directory from within the standalone libraries in Samba. For example,
        # a symlink to ../../buildtools/scripts/autogen-waf.sh needs
        # to be rewritten as a symlink to buildtools/scripts/autogen-waf.sh
        # when the tarball for talloc is built

        # the filename without the appname-version
        rel_fname = '/'.join(fname.split('/')[1:])

        # join this with the symlink target
        tgt_full = os.path.join(os.path.dirname(rel_fname), tgt)

        # join with the base directory
        tgt_base = os.path.normpath(os.path.join(basedir, tgt_full))

        # see if this is inside one of our dist_dirs
        for dir in dist_dirs.split():
            if dir.find(':') != -1:
                destdir = dir.split(':')[1]
                dir = dir.split(':')[0]
            else:
                destdir = '.'
            if dir == basedir:
                # internal links don't get rewritten
                continue
            # use startswith with an explicit separator: the previous
            # 'tgt_base[len(dir)]' indexing raised IndexError when the
            # link target was exactly equal to a dist dir
            if tgt_base.startswith(dir + '/'):
                new_tgt = destdir + tgt_base[len(dir):]
                tinfo.linkname = new_tgt
                break

    # normalise ownership so tarballs are reproducible
    tinfo.uid   = 0
    tinfo.gid   = 0
    tinfo.uname = 'root'
    tinfo.gname = 'root'
    tar.addfile(tinfo)
    return True
+
def add_tarfile(tar, fname, abspath, basedir):
    '''add a file to the tarball'''
    # symlinks get special target-rewriting treatment
    if add_symlink(tar, fname, abspath, basedir):
        return
    try:
        tinfo = tar.gettarinfo(name=abspath, arcname=fname)
    except OSError:
        Logs.error('Unable to find file %s - missing from git checkout?' % abspath)
        sys.exit(1)

    # normalise ownership so tarballs are reproducible
    tinfo.uid   = 0
    tinfo.gid   = 0
    tinfo.uname = 'root'
    tinfo.gname = 'root'

    with open(abspath, "rb") as fh:
        tar.addfile(tinfo, fileobj=fh)
+
+
def vcs_dir_contents(path):
    """Return the versioned files under a path.

    :return: List of paths relative to path
    """
    # walk upwards from path until we find the enclosing git repository
    repo = path
    while repo != "/" and not os.path.exists(os.path.join(repo, ".git")):
        repo = os.path.dirname(repo)
    if repo == "/":
        raise Exception("unsupported or no vcs for %s" % path)

    env = dict(os.environ)
    env["GIT_DIR"] = os.path.join(repo, ".git")
    ls_files_cmd = ['git', 'ls-files', '--full-name',
                    os.path.relpath(path, repo)]
    out = get_string(Utils.cmd_output(ls_files_cmd, cwd=None, env=env))
    return out.split('\n')
+
+
def dist(appname='', version=''):
    '''create the release tarball for appname/version

    Packages every directory registered with DIST_DIRS() (plus any
    DIST_FILES() extras, minus DIST_BLACKLIST() entries) into
    <appname>-<version>.tar.gz.  With --sign-release an uncompressed
    tar is written first, detached-signed with gpg, then gzipped.
    Returns the archive name and records it in the dist_archive global.
    '''

    def add_files_to_tarball(tar, srcdir, srcsubdir, dstdir, dstsubdir, blacklist, files):
        # add 'files' (paths relative to srcdir) under dstdir/dstsubdir
        if blacklist is None:
            blacklist = []
        for f in files:
            abspath = os.path.join(srcdir, f)

            if srcsubdir != '.':
                f = f[len(srcsubdir)+1:]

            # Remove files in the blacklist
            if f in blacklist:
                continue
            blacklisted = False
            # Remove directories in the blacklist
            for d in blacklist:
                if f.startswith(d):
                    blacklisted = True
            if blacklisted:
                continue
            if os.path.isdir(abspath) and not os.path.islink(abspath):
                continue
            if dstsubdir != '.':
                f = dstsubdir + '/' + f
            fname = dstdir + '/' + f
            add_tarfile(tar, fname, abspath, srcsubdir)


    def list_directory_files(path):
        # recursively list files under 'path', relative to srcdir
        curdir = os.getcwd()
        os.chdir(srcdir)
        out_files = []
        for root, dirs, files in os.walk(path):
            for f in files:
                out_files.append(os.path.join(root, f))
        os.chdir(curdir)
        return out_files


    if not isinstance(appname, str) or not appname:
        # this copes with a mismatch in the calling arguments for dist()
        appname = Context.g_module.APPNAME
        version = Context.g_module.VERSION
    if not version:
        version = Context.g_module.VERSION

    srcdir = os.path.normpath(
        os.path.join(os.path.dirname(Context.g_module.root_path),
                     Context.g_module.top))

    if not dist_dirs:
        Logs.error('You must use samba_dist.DIST_DIRS() to set which directories to package')
        sys.exit(1)

    dist_base = '%s-%s' % (appname, version)

    if Options.options.SIGN_RELEASE:
        # uncompressed first; it is signed, then gzipped below
        dist_name = '%s.tar' % (dist_base)
        tar = tarfile.open(dist_name, 'w')
    else:
        dist_name = '%s.tar.gz' % (dist_base)
        tar = tarfile.open(dist_name, 'w:gz')

    blacklist = dist_blacklist.split()

    for dir in dist_dirs.split():
        # entries have the form 'srcdir[:destdir]'
        if dir.find(':') != -1:
            destdir=dir.split(':')[1]
            dir=dir.split(':')[0]
        else:
            destdir = '.'
        absdir = os.path.join(srcdir, dir)
        try:
            files = vcs_dir_contents(absdir)
        except Exception as e:
            Logs.error('unable to get contents of %s: %s' % (absdir, e))
            sys.exit(1)
        add_files_to_tarball(tar, srcdir, dir, dist_base, destdir, blacklist, files)

    if dist_files:
        for file in dist_files.split():
            # entries have the form 'file[:destfile]'
            if file.find(':') != -1:
                destfile = file.split(':')[1]
                file = file.split(':')[0]
            else:
                destfile = file

            absfile = os.path.join(srcdir, file)

            if os.path.isdir(absfile) and not os.path.islink(absfile):
                destdir = destfile
                dir = file
                files = list_directory_files(dir)
                add_files_to_tarball(tar, srcdir, dir, dist_base, destdir, blacklist, files)
            else:
                fname = dist_base + '/' + destfile
                add_tarfile(tar, fname, absfile, destfile)

    tar.close()

    if Options.options.SIGN_RELEASE:
        import gzip
        try:
            os.unlink(dist_name + '.asc')
        except OSError:
            pass

        cmd = "gpg --detach-sign --armor " + dist_name
        os.system(cmd)
        # gzip the signed tarball by hand so the signature matches the
        # uncompressed content
        uncompressed_tar = open(dist_name, 'rb')
        compressed_tar = gzip.open(dist_name + '.gz', 'wb')
        while 1:
            buffer = uncompressed_tar.read(1048576)
            if buffer:
                compressed_tar.write(buffer)
            else:
                break
        uncompressed_tar.close()
        compressed_tar.close()
        os.unlink(dist_name)
        Logs.info('Created %s.gz %s.asc' % (dist_name, dist_name))
        dist_name = dist_name + '.gz'
    else:
        Logs.info('Created %s' % dist_name)

    # TODO use the ctx object instead
    global dist_archive
    dist_archive = dist_name
    return dist_name
+
+
@conf
def DIST_DIRS(dirs):
    '''set the directories to package, relative to top srcdir'''
    global dist_dirs
    # first caller wins; later calls are ignored
    if dist_dirs:
        return
    dist_dirs = dirs
+
@conf
def DIST_FILES(files, extend=False):
    '''set additional files for packaging, relative to top srcdir'''
    global dist_files
    if not dist_files:
        # first call establishes the file list
        dist_files = files
        return
    if extend:
        # later calls may append, but only when explicitly requested
        dist_files = "%s %s" % (dist_files, files)
+
@conf
def DIST_BLACKLIST(blacklist):
    '''set the files to exclude from packaging, relative to top srcdir'''
    global dist_blacklist
    # first caller wins; later calls are ignored
    if dist_blacklist:
        return
    dist_blacklist = blacklist

# replace waf's standard dist implementation with the samba one
Scripting.dist = dist
diff --git a/buildtools/wafsamba/samba_git.py b/buildtools/wafsamba/samba_git.py
new file mode 100644
index 0000000..fe540ec
--- /dev/null
+++ b/buildtools/wafsamba/samba_git.py
@@ -0,0 +1,58 @@
+import os
+import subprocess
+
def find_git(env=None):
    """Find the git binary."""
    # prefer a configure-detected GIT from the waf environment
    if env is not None and 'GIT' in env:
        return env.get_flat('GIT')

    # fall back to the common system location; this is useful when
    # doing 'make dist' without configuring first
    system_git = "/usr/bin/git"
    if os.path.exists(system_git):
        return system_git

    return None
+
+
def has_submodules(path):
    """Check whether a source directory is git-versioned and has submodules.

    :param path: Path to Samba source directory
    """
    git_dir = os.path.join(path, ".git")
    modules_file = os.path.join(path, ".gitmodules")
    # both the repository metadata and a submodule list must exist
    return os.path.isdir(git_dir) and os.path.isfile(modules_file)
+
+
def read_submodule_status(path, env=None):
    """Check status of submodules.

    :param path: Path to git directory
    :param env: Optional waf environment
    :return: Yields tuples with submodule relpath and status
        (one of: 'out-of-date', 'not-checked-out', 'up-to-date')
    :raise RuntimeError: raised when parsing of 'git submodule status' output
        fails.
    """
    if not has_submodules(path):
        # No point in running git.
        return
    git = find_git(env)
    if git is None:
        return
    # stderr is not captured here, so git errors go to the caller's stderr
    p = subprocess.Popen([git, "submodule", "status"], stdout=subprocess.PIPE,
        cwd=path)
    (stdout, stderr) = p.communicate(None)
    for l in stdout.splitlines():
        l = l.decode('utf-8')
        l = l.rstrip()
        # line format: one status character, then "<sha> <path> [describe]"
        status = l[0]
        l = l[1:]
        parts = l.split(" ")
        if len(parts) > 2 and status in ("-", "+"):
            # '-' with a describe suffix or '+' means the checkout differs
            yield (parts[1], "out-of-date")
        elif len(parts) == 2 and status == "-":
            # '-' without a describe suffix: submodule never initialised
            yield (parts[1], "not-checked-out")
        elif len(parts) > 2 and status == " ":
            yield (parts[1], "up-to-date")
        else:
            raise RuntimeError("Unable to parse submodule status: %r, %r" % (status, parts))
diff --git a/buildtools/wafsamba/samba_headers.py b/buildtools/wafsamba/samba_headers.py
new file mode 100644
index 0000000..37147a9
--- /dev/null
+++ b/buildtools/wafsamba/samba_headers.py
@@ -0,0 +1,181 @@
+# specialist handling of header files for Samba
+
+import os, re, sys, fnmatch
+from waflib import Build, Logs, Utils, Errors
+from samba_utils import TO_LIST
+
+
def header_install_path(header, header_path):
    '''find the installation path for a header, given a header_path option'''
    if not header_path:
        return ''
    if not isinstance(header_path, list):
        # a plain string applies to all headers
        return header_path
    # list form: (patterns, destination) pairs; first match wins
    for (patterns, dest) in header_path:
        for pattern in TO_LIST(patterns):
            if fnmatch.fnmatch(header, pattern):
                return dest
    # default to current path
    return ''
+
+
# matches '#include "..."' lines so private includes can be rewritten
re_header = re.compile(r'^\s*#\s*include[ \t]*"([^"]+)"', re.I | re.M)

# a dictionary mapping source header paths to public header paths
header_map = {}
+
def find_suggested_header(hpath):
    '''find a suggested header path to use'''
    base = os.path.basename(hpath)
    suggestions = []
    for src, public in header_map.items():
        if os.path.basename(src) == base:
            # offer both the public and the original private spelling
            suggestions.append('<%s>' % public)
            suggestions.append('"%s"' % src)
    return suggestions
+
def create_public_header(task):
    '''create a public header from a private one, output within the build tree

    Copies the source header line by line, rewriting '#include "..."'
    references to their public '<...>' equivalents via header_map.
    Raises WafError if an include cannot be resolved (unless the build
    allows broken public headers).
    '''
    # NOTE(review): abspath(env)/bldpath(env) is the old waf node API -
    # presumably matches the waf version bundled here; verify on upgrade
    src = task.inputs[0].abspath(task.env)
    tgt = task.outputs[0].bldpath(task.env)

    if os.path.exists(tgt):
        os.unlink(tgt)

    relsrc = os.path.relpath(src, task.env.TOPDIR)

    infile  = open(src, mode='r')
    outfile = open(tgt, mode='w')
    linenumber = 0

    # include resolution is tried relative to '', the source dir, and any
    # '#'-prefixed EXTRA_INCLUDES entries
    search_paths = [ '', task.env.RELPATH ]
    for i in task.env.EXTRA_INCLUDES:
        if i.startswith('#'):
            search_paths.append(i[1:])

    for line in infile:
        linenumber += 1

        # allow some straight substitutions
        if task.env.public_headers_replace and line.strip() in task.env.public_headers_replace:
            outfile.write(task.env.public_headers_replace[line.strip()] + '\n')
            continue

        # see if its an include line
        m = re_header.match(line)
        if m is None:
            outfile.write(line)
            continue

        # its an include, get the header path
        hpath = m.group(1)
        if hpath.startswith("bin/default/"):
            hpath = hpath[12:]

        # some are always allowed
        if task.env.public_headers_skip and hpath in task.env.public_headers_skip:
            outfile.write(line)
            continue

        # work out the header this refers to
        found = False
        for s in search_paths:
            p = os.path.normpath(os.path.join(s, hpath))
            if p in header_map:
                outfile.write("#include <%s>\n" % header_map[p])
                found = True
                break
        if found:
            continue

        if task.env.public_headers_allow_broken:
            Logs.warn("Broken public header include '%s' in '%s'" % (hpath, relsrc))
            outfile.write(line)
            continue

        # try to be nice to the developer by suggesting an alternative
        # NOTE(review): this error path closes outfile and removes the
        # partial output, but leaves infile open - harmless since we
        # abort, but worth confirming
        suggested = find_suggested_header(hpath)
        outfile.close()
        os.unlink(tgt)
        sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % (
            os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested))
        raise Errors.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % (
            hpath, relsrc, task.env.RELPATH))
    infile.close()
    outfile.close()
+
+
def public_headers_simple(bld, public_headers, header_path=None, public_headers_install=True):
    '''install some headers - simple version, no munging needed

    Each entry in public_headers is either 'path/to/header.h' (installed
    under its basename) or 'src:destname'.  NOTE(review): header_path is
    accepted for API symmetry with PUBLIC_HEADERS but was never applied
    to the destination here; the dead header_install_path() call has
    been removed.
    '''
    if not public_headers_install:
        return
    for h in TO_LIST(public_headers):
        # 'src:dest' entries install under an explicit name
        if h.find(':') != -1:
            s = h.split(":")
            h_name =  s[0]
            inst_name = s[1]
        else:
            h_name =  h
            inst_name = os.path.basename(h)
        bld.INSTALL_FILES('${INCLUDEDIR}', h_name, destname=inst_name)
+
+
def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install=True):
    '''install some headers

    header_path may either be a string that is added to the INCLUDEDIR,
    or it can be a dictionary of wildcard patterns which map to destination
    directories relative to INCLUDEDIR
    '''
    bld.SET_BUILD_GROUP('final')

    if not bld.env.build_public_headers:
        # in this case no header munging needed. Used for tdb, talloc etc
        public_headers_simple(bld, public_headers, header_path=header_path,
                              public_headers_install=public_headers_install)
        return

    # create the public header in the given path
    # in the build tree
    for h in TO_LIST(public_headers):
        inst_path = header_install_path(h, header_path)
        # 'src:dest' entries install under an explicit name
        if h.find(':') != -1:
            s = h.split(":")
            h_name =  s[0]
            inst_name = s[1]
        else:
            h_name =  h
            inst_name = os.path.basename(h)
        curdir = bld.path.abspath()
        # relpath1: from the current wscript dir up to the source root
        # relpath2: from the source root down to the current wscript dir
        relpath1 = os.path.relpath(bld.srcnode.abspath(), curdir)
        relpath2 = os.path.relpath(curdir, bld.srcnode.abspath())
        targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path))
        if not os.path.exists(os.path.join(curdir, targetdir)):
            raise Errors.WafError("missing source directory %s for public header %s" % (targetdir, inst_name))
        target = os.path.join(targetdir, inst_name)

        # the source path of the header, relative to the top of the source tree
        src_path = os.path.normpath(os.path.join(relpath2, h_name))

        # the install path of the header, relative to the public include directory
        target_path = os.path.normpath(os.path.join(inst_path, inst_name))

        # record the mapping so create_public_header can rewrite includes
        header_map[src_path] = target_path

        t = bld.SAMBA_GENERATOR('HEADER_%s/%s/%s' % (relpath2, inst_path, inst_name),
                                group='headers',
                                rule=create_public_header,
                                source=h_name,
                                target=target)
        t.env.RELPATH = relpath2
        t.env.TOPDIR  = bld.srcnode.abspath()
        if not bld.env.public_headers_list:
            bld.env.public_headers_list = []
        bld.env.public_headers_list.append(os.path.join(inst_path, inst_name))
        if public_headers_install:
            bld.INSTALL_FILES('${INCLUDEDIR}',
                              target,
                              destname=os.path.join(inst_path, inst_name), flat=True)
Build.BuildContext.PUBLIC_HEADERS = PUBLIC_HEADERS
diff --git a/buildtools/wafsamba/samba_install.py b/buildtools/wafsamba/samba_install.py
new file mode 100644
index 0000000..a43d103
--- /dev/null
+++ b/buildtools/wafsamba/samba_install.py
@@ -0,0 +1,236 @@
+###########################
+# this handles the magic we need to do for installing
+# with all the configure options that affect rpath and shared
+# library use
+
+import os
+from waflib import Utils, Errors
+from waflib.TaskGen import feature, before, after
+from samba_utils import LIB_PATH, MODE_755, install_rpath, build_rpath
+
@feature('install_bin')
@after('apply_core')
@before('apply_link', 'apply_obj_vars')
def install_binary(self):
    '''install a binary, taking account of the different rpath variants

    When the rpath needed at install time differs from the rpath used in
    the build tree, the binary is relinked under a ".inst" suffix so the
    build-tree binary (with the build rpath) is not overwritten.
    '''
    bld = self.bld

    # get the ldflags we will use for install and build
    install_ldflags = install_rpath(self)
    build_ldflags = build_rpath(bld)

    if not self.bld.is_install:
        # just need to set rpath if we are not installing
        self.env.RPATH = build_ldflags
        return

    # work out the install path, expanding variables
    install_path = getattr(self, 'samba_inst_path', None) or '${BINDIR}'
    install_path = bld.EXPAND_VARIABLES(install_path)

    # remember the original name: install_as() below installs the
    # (possibly renamed) build product under this name
    orig_target = os.path.basename(self.target)

    if install_ldflags != build_ldflags:
        # we will be creating a new target name, and using that for the
        # install link. That stops us from overwriting the existing build
        # target, which has different ldflags
        self.target += '.inst'

    # setup the right rpath link flags for the install
    self.env.RPATH = install_ldflags

    if not self.samba_install:
        # this binary is marked not to be installed
        return

    # tell waf to install the right binary
    bld.install_as(os.path.join(install_path, orig_target),
                   self.path.find_or_declare(self.target),
                   chmod=MODE_755)
+
+
+
@feature('install_lib')
@after('apply_core')
@before('apply_link', 'apply_obj_vars')
def install_library(self):
    '''install a library, taking account of the different rpath variants

    Handles the naming dance between the build-tree library, the installed
    real name, the soname symlink and the development symlink.  When the
    install rpath differs from the build rpath, a cloned ".inst" task
    generator is created so both variants get built.
    '''
    if getattr(self, 'done_install_library', False):
        # guard against running twice: the clone() below re-posts this
        # task generator
        return

    bld = self.bld

    # NOTE(review): nothing in this body visibly modifies
    # bld.all_envs['default']; the save/restore looks defensive — confirm
    # whether clone()/post() can clobber it
    default_env = bld.all_envs['default']
    try:
        install_ldflags = install_rpath(self)
        build_ldflags = build_rpath(bld)

        if not self.bld.is_install or not getattr(self, 'samba_install', True):
            # just need to set the build rpath if we are not installing
            self.env.RPATH = build_ldflags
            return

        # setup the install path, expanding variables
        install_path = getattr(self, 'samba_inst_path', None)
        if install_path is None:
            if getattr(self, 'private_library', False):
                install_path = '${PRIVATELIBDIR}'
            else:
                install_path = '${LIBDIR}'
        install_path = bld.EXPAND_VARIABLES(install_path)

        target_name = self.target

        if install_ldflags != build_ldflags:
            # we will be creating a new target name, and using that for the
            # install link. That stops us from overwriting the existing build
            # target, which has different ldflags
            self.done_install_library = True
            t = self.clone(self.env)
            t.posted = False
            t.target += '.inst'
            t.name = self.name + '.inst'
            self.env.RPATH = build_ldflags
        else:
            t = self

        t.env.RPATH = install_ldflags

        dev_link = None

        # in the following the names are:
        # - inst_name is the name with .inst. in it, in the build
        #   directory
        # - install_name is the name in the install directory
        # - install_link is a symlink in the install directory, to install_name

        if getattr(self, 'samba_realname', None):
            install_name = self.samba_realname
            install_link = None
            if getattr(self, 'soname', ''):
                install_link = self.soname
            if getattr(self, 'samba_type', None) == 'PYTHON':
                # python extension modules have no "lib" prefix and a
                # python-specific extension
                inst_name = bld.make_libname(t.target, nolibprefix=True, python=True)
            else:
                inst_name = bld.make_libname(t.target)
        elif self.vnum:
            # versioned library: install libfoo.so.X.Y.Z with a
            # libfoo.so.X soname symlink
            vnum_base = self.vnum.split('.')[0]
            install_name = bld.make_libname(target_name, version=self.vnum)
            install_link = bld.make_libname(target_name, version=vnum_base)
            inst_name = bld.make_libname(t.target)
            if not self.private_library or not t.env.SONAME_ST:
                # only generate the dev link for non-bundled libs
                dev_link = bld.make_libname(target_name)
        elif getattr(self, 'soname', ''):
            install_name = bld.make_libname(target_name)
            install_link = self.soname
            inst_name = bld.make_libname(t.target)
        else:
            install_name = bld.make_libname(target_name)
            install_link = None
            inst_name = bld.make_libname(t.target)

        if t.env.SONAME_ST:
            # ensure we get the right names in the library
            if install_link:
                t.env.append_value('LINKFLAGS', t.env.SONAME_ST % install_link)
            else:
                t.env.append_value('LINKFLAGS', t.env.SONAME_ST % install_name)
            # blank it so apply_soname/apply_vnum do not add a second -soname
            t.env.SONAME_ST = ''

        # tell waf to install the library
        bld.install_as(os.path.join(install_path, install_name),
                       self.path.find_or_declare(inst_name),
                       chmod=MODE_755)

        if install_link and install_link != install_name:
            # and the symlink if needed
            bld.symlink_as(os.path.join(install_path, install_link), os.path.basename(install_name))
        if dev_link:
            bld.symlink_as(os.path.join(install_path, dev_link), os.path.basename(install_name))
    finally:
        bld.all_envs['default'] = default_env
+
+
@feature('cshlib')
@after('apply_implib')
@before('apply_vnum')
def apply_soname(self):
    '''add an ld -soname linker flag for a shared library

    Note: the original docstring here was copy-pasted from install_library;
    this hook only sets the soname.  SONAME_ST is blanked afterwards so no
    other hook adds a second -soname flag.
    '''

    if self.env.SONAME_ST and getattr(self, 'soname', ''):
        self.env.append_value('LINKFLAGS', self.env.SONAME_ST % self.soname)
        self.env.SONAME_ST = ''
+
@feature('cshlib')
@after('apply_implib')
@before('apply_vnum')
def apply_vscript(self):
    '''add version-script arguments to library build

    Only applies when configure detected linker support
    (HAVE_LD_VERSION_SCRIPT) and the task generator sets version_script.
    The attribute is cleared afterwards so the flag is added only once.
    '''

    if self.env.HAVE_LD_VERSION_SCRIPT and getattr(self, 'version_script', ''):
        self.env.append_value('LINKFLAGS', "-Wl,--version-script=%s" %
                              self.version_script)
        self.version_script = None
+
+
+##############################
+# handle the creation of links for libraries and binaries in the build tree
+
@feature('symlink_lib')
@after('apply_link')
def symlink_lib(self):
    '''symlink a shared lib into the build-tree shared library directory

    The link lands in <blddir>/shared (or shared/private for private
    libraries) unless the task generator supplies an explicit link_name.
    '''

    if self.target.endswith('.inst'):
        # .inst variants exist only for install relinking; never symlinked
        return

    # NOTE(review): abspath(env) is the waf-1.x node API — confirm against
    # the waf version bundled with this tree
    blddir = os.path.dirname(self.bld.srcnode.abspath(self.bld.env))
    libpath = self.link_task.outputs[0].abspath(self.env)

    # calculate the link target and put it in the environment
    soext=""
    vnum = getattr(self, 'vnum', None)
    if vnum is not None:
        # only the major version number appears in the link name
        soext = '.' + vnum.split('.')[0]

    link_target = getattr(self, 'link_name', '')
    if link_target == '':
        basename = os.path.basename(self.bld.make_libname(self.target, version=soext))
        if getattr(self, "private_library", False):
            link_target = '%s/private/%s' % (LIB_PATH, basename)
        else:
            link_target = '%s/%s' % (LIB_PATH, basename)

    link_target = os.path.join(blddir, link_target)

    if os.path.lexists(link_target):
        if os.path.islink(link_target) and os.readlink(link_target) == libpath:
            # already points at the right place; nothing to do
            return
        os.unlink(link_target)

    link_container = os.path.dirname(link_target)
    if not os.path.isdir(link_container):
        os.makedirs(link_container)

    os.symlink(libpath, link_target)
+
+
@feature('symlink_bin')
@after('apply_link')
def symlink_bin(self):
    '''symlink a binary into the build directory

    Creates <BUILD_DIRECTORY>/<binary> pointing at the real link-task
    output so binaries are easy to find at the top of the build tree.
    '''

    if self.target.endswith('.inst'):
        # .inst variants exist only for install relinking; never symlinked
        return

    if not self.link_task.outputs or not self.link_task.outputs[0]:
        raise Errors.WafError('no outputs found for %s in symlink_bin' % self.name)
    binpath = self.link_task.outputs[0].abspath(self.env)
    bldpath = os.path.join(self.bld.env.BUILD_DIRECTORY, self.link_task.outputs[0].name)

    if os.path.lexists(bldpath):
        if os.path.islink(bldpath) and os.readlink(bldpath) == binpath:
            # already points at the right place; nothing to do
            return
        os.unlink(bldpath)
    os.symlink(binpath, bldpath)
diff --git a/buildtools/wafsamba/samba_patterns.py b/buildtools/wafsamba/samba_patterns.py
new file mode 100644
index 0000000..4129681
--- /dev/null
+++ b/buildtools/wafsamba/samba_patterns.py
@@ -0,0 +1,234 @@
+# a waf tool to add extension based build patterns for Samba
+
+import sys
+from waflib import Build
+from wafsamba import samba_version_file
+
def write_version_header(task):
    '''task rule: write the rendered version.h contents to the task output

    Reads the VERSION file (first task input), formats it via
    samba_version_file() and writes str() of the result.  Returns 0 so waf
    treats the task as successful.
    '''
    src = task.inputs[0].srcpath(task.env)

    version = samba_version_file(src, task.env.srcdir, env=task.env, is_install=task.generator.bld.is_install)
    string = str(version)

    task.outputs[0].write(string)
    return 0
+
+
def SAMBA_MKVERSION(bld, target, source='VERSION buildtools/wafsamba/samba_version.py'):
    '''generate the version.h header for Samba

    :param bld:    the waf build context
    :param target: path of the header to generate
    :param source: dependency list; the VERSION file plus the generator
                   script, so edits to either trigger a rebuild
    '''

    # We only force waf to re-generate this file if we are installing,
    # because only then is information not included in the deps (the
    # git revision) included in the version.
    # (the generator return value was previously bound to an unused
    # local 't'; dropped)
    bld.SAMBA_GENERATOR('VERSION',
                        rule=write_version_header,
                        group='setup',
                        source=source,
                        target=target,
                        always=bld.is_install)
Build.BuildContext.SAMBA_MKVERSION = SAMBA_MKVERSION
+
+
def write_build_options_header(fp):
    '''write preamble for build_options.c

    Emits the license banner, the includes, the static output() helper and
    the opening of build_options() up to and including the dynconfig paths
    table.  The body continues with write_build_options_section() calls and
    is closed by write_build_options_footer().
    '''
    fp.write("/*\n"
             " Unix SMB/CIFS implementation.\n"
             " Build Options for Samba Suite\n"
             " Copyright (C) Vance Lankhaar <vlankhaar@linux.ca> 2003\n"
             " Copyright (C) Andrew Bartlett <abartlet@samba.org> 2001\n"
             "\n"
             " This program is free software; you can redistribute it and/or modify\n"
             " it under the terms of the GNU General Public License as published by\n"
             " the Free Software Foundation; either version 3 of the License, or\n"
             " (at your option) any later version.\n"
             "\n"
             " This program is distributed in the hope that it will be useful,\n"
             " but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
             " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
             " GNU General Public License for more details.\n"
             "\n"
             " You should have received a copy of the GNU General Public License\n"
             " along with this program; if not, see <http://www.gnu.org/licenses/>.\n"
             "*/\n"
             "\n"
             "#include \"includes.h\"\n"
             "#include \"dynconfig/dynconfig.h\"\n"
             "#include \"lib/cluster_support.h\"\n"

             "\n"
             "static int output(bool screen, const char *format, ...) PRINTF_ATTRIBUTE(2,3);\n"
             "void build_options(bool screen);\n"
             "\n"
             "\n"
             "/****************************************************************************\n"
             "helper function for build_options\n"
             "****************************************************************************/\n"
             "static int output(bool screen, const char *format, ...)\n"
             "{\n"
             " char *ptr = NULL;\n"
             " int ret = 0;\n"
             " va_list ap;\n"
             " \n"
             " va_start(ap, format);\n"
             " ret = vasprintf(&ptr,format,ap);\n"
             " va_end(ap);\n"
             "\n"
             " if (screen) {\n"
             " d_printf(\"%s\", ptr ? ptr : \"\");\n"
             " } else {\n"
             " DEBUG(4,(\"%s\", ptr ? ptr : \"\"));\n"
             " }\n"
             " \n"
             " SAFE_FREE(ptr);\n"
             " return ret;\n"
             "}\n"
             "\n"
             "/****************************************************************************\n"
             "options set at build time for the samba suite\n"
             "****************************************************************************/\n"
             "void build_options(bool screen)\n"
             "{\n"
             " if ((DEBUGLEVEL < 4) && (!screen)) {\n"
             " return;\n"
             " }\n"
             "\n"
             "\n"
             " /* Output various paths to files and directories */\n"
             " output(screen,\"\\nPaths:\\n\"\n"
             " \" SBINDIR: %s\\n\"\n"
             " \" BINDIR: %s\\n\"\n"
             " \" CONFIGFILE: %s\\n\"\n"
             " \" LOGFILEBASE: %s\\n\"\n"
             " \" LMHOSTSFILE: %s\\n\"\n"
             " \" LIBDIR: %s\\n\"\n"
             " \" DATADIR: %s\\n\"\n"
             " \" SAMBA_DATADIR: %s\\n\"\n"
             " \" MODULESDIR: %s\\n\"\n"
             " \" SHLIBEXT: %s\\n\"\n"
             " \" LOCKDIR: %s\\n\"\n"
             " \" STATEDIR: %s\\n\"\n"
             " \" CACHEDIR: %s\\n\"\n"
             " \" PIDDIR: %s\\n\"\n"
             " \" SMB_PASSWD_FILE: %s\\n\"\n"
             " \" PRIVATE_DIR: %s\\n\"\n"
             " \" BINDDNS_DIR: %s\\n\",\n"
             " get_dyn_SBINDIR(),\n"
             " get_dyn_BINDIR(),\n"
             " get_dyn_CONFIGFILE(),\n"
             " get_dyn_LOGFILEBASE(),\n"
             " get_dyn_LMHOSTSFILE(),\n"
             " get_dyn_LIBDIR(),\n"
             " get_dyn_DATADIR(),\n"
             " get_dyn_SAMBA_DATADIR(),\n"
             " get_dyn_MODULESDIR(),\n"
             " get_dyn_SHLIBEXT(),\n"
             " get_dyn_LOCKDIR(),\n"
             " get_dyn_STATEDIR(),\n"
             " get_dyn_CACHEDIR(),\n"
             " get_dyn_PIDDIR(),\n"
             " get_dyn_SMB_PASSWD_FILE(),\n"
             " get_dyn_PRIVATE_DIR(),\n"
             " get_dyn_BINDDNS_DIR());\n"
             "\n")
+
def write_build_options_footer(fp):
    '''write the closing part of build_options.c

    Emits the cluster-support feature dump, the table of fundamental C
    type sizes, the builtin-modules list, and the closing brace of
    build_options().  Complements write_build_options_header().
    '''
    fp.write(" /* Output the sizes of the various cluster features */\n"
             " output(screen, \"\\n%s\", cluster_support_features());\n"
             "\n"
             " /* Output the sizes of the various types */\n"
             " output(screen, \"\\nType sizes:\\n\"\n"
             " \" sizeof(char): %lu\\n\"\n"
             " \" sizeof(int): %lu\\n\"\n"
             " \" sizeof(long): %lu\\n\"\n"
             " \" sizeof(long long): %lu\\n\"\n"
             " \" sizeof(uint8_t): %lu\\n\"\n"
             " \" sizeof(uint16_t): %lu\\n\"\n"
             " \" sizeof(uint32_t): %lu\\n\"\n"
             " \" sizeof(short): %lu\\n\"\n"
             " \" sizeof(void*): %lu\\n\"\n"
             " \" sizeof(size_t): %lu\\n\"\n"
             " \" sizeof(off_t): %lu\\n\"\n"
             " \" sizeof(ino_t): %lu\\n\"\n"
             " \" sizeof(dev_t): %lu\\n\",\n"
             " (unsigned long)sizeof(char),\n"
             " (unsigned long)sizeof(int),\n"
             " (unsigned long)sizeof(long),\n"
             " (unsigned long)sizeof(long long),\n"
             " (unsigned long)sizeof(uint8_t),\n"
             " (unsigned long)sizeof(uint16_t),\n"
             " (unsigned long)sizeof(uint32_t),\n"
             " (unsigned long)sizeof(short),\n"
             " (unsigned long)sizeof(void*),\n"
             " (unsigned long)sizeof(size_t),\n"
             " (unsigned long)sizeof(off_t),\n"
             " (unsigned long)sizeof(ino_t),\n"
             " (unsigned long)sizeof(dev_t));\n"
             "\n"
             " output(screen, \"\\nBuiltin modules:\\n\"\n"
             " \" %s\\n\", STRING_STATIC_MODULES);\n"
             "}\n")
+
def write_build_options_section(fp, keys, section):
    '''Emit one titled section of build_options.c.

    Writes a C comment and an output() banner for *section*, then one
    "#ifdef KEY / output / #endif" stanza per key, sorted, followed by a
    blank line.
    '''
    chunks = ["\n\t/* Show %s */\n" % section,
              " output(screen, \"\\n%s:\\n\");\n\n" % section]

    for key in sorted(keys):
        chunks.append("#ifdef %s\n" % key)
        chunks.append(" output(screen, \" %s\\n\");\n" % key)
        chunks.append("#endif\n")
    chunks.append("\n")

    fp.write("".join(chunks))
+
def write_build_options(task):
    '''task rule: generate build_options.c from the configure environment

    Buckets every key in the task env into UTMP / --with / HAVE_SYS_* /
    other HAVE_* header / HAVE_* feature / misc groups and writes one
    section per bucket between the standard header and footer.
    Returns 0 so waf treats the task as successful.
    '''
    tbl = task.env
    keys_option_with = []
    keys_option_utmp = []
    keys_option_have = []
    keys_header_sys = []
    keys_header_other = []
    keys_misc = []
    # translation table used to make arbitrary define names into valid
    # C-comment-safe identifiers; bytes vs str variant depends on python
    if sys.hexversion>0x300000f:
        trans_table = bytes.maketrans(b'.-()', b'____')
    else:
        import string
        trans_table = string.maketrans('.-()', '____')

    for key in tbl:
        if key.startswith("HAVE_UT_UT_") or key.find("UTMP") >= 0:
            keys_option_utmp.append(key)
        elif key.startswith("WITH_"):
            keys_option_with.append(key)
        elif key.startswith("HAVE_SYS_"):
            keys_header_sys.append(key)
        elif key.startswith("HAVE_"):
            if key.endswith("_H"):
                keys_header_other.append(key)
            else:
                keys_option_have.append(key)
        elif key.startswith("static_init_"):
            # keep only the function name part before the '('
            l = key.split("(")
            keys_misc.append(l[0])
        else:
            keys_misc.append(key.translate(trans_table))

    tgt = task.outputs[0].bldpath(task.env)
    # fix: use a context manager so the output file is closed even if one
    # of the section writers raises (was a bare open()/close() pair)
    with open(tgt, 'w') as f:
        write_build_options_header(f)
        write_build_options_section(f, keys_header_sys, "System Headers")
        write_build_options_section(f, keys_header_other, "Headers")
        write_build_options_section(f, keys_option_utmp, "UTMP Options")
        write_build_options_section(f, keys_option_have, "HAVE_* Defines")
        write_build_options_section(f, keys_option_with, "--with Options")
        write_build_options_section(f, keys_misc, "Build Options")
        write_build_options_footer(f)
    return 0
+
+
def SAMBA_BLDOPTIONS(bld, target):
    '''generate the build_options.c file for Samba

    :param bld:    the waf build context
    :param target: path of the C file to generate

    dep_vars=['defines'] makes the file regenerate whenever the set of
    configure defines changes.
    '''
    # (the generator return value was previously bound to an unused
    # local 't'; dropped)
    bld.SAMBA_GENERATOR(target,
                        rule=write_build_options,
                        dep_vars=['defines'],
                        target=target)
Build.BuildContext.SAMBA_BLDOPTIONS = SAMBA_BLDOPTIONS
diff --git a/buildtools/wafsamba/samba_perl.py b/buildtools/wafsamba/samba_perl.py
new file mode 100644
index 0000000..2139d14
--- /dev/null
+++ b/buildtools/wafsamba/samba_perl.py
@@ -0,0 +1,58 @@
+from waflib import Utils
+from waflib.Configure import conf
+from samba_utils import get_string
# module-level once-guard: SAMBA_CHECK_PERL runs its checks only on the
# first invocation per process
done = {}

@conf
def SAMBA_CHECK_PERL(conf, mandatory=True, version=(5,0,0)):
    '''find perl, check its version and work out the perl install dirs

    Sets PERL, PERL_SPECIFIED, PERL_ARCH_INSTALL_DIR, PERL_LIB_INSTALL_DIR
    and PERL_INC in conf.env.  The vendor dirs from perl's own %Config are
    used only when perl's vendorprefix matches our install PREFIX;
    otherwise samba-local defaults under LIBDIR/DATADIR are used.
    '''
    if "done" in done:
        return
    done["done"] = True
    conf.find_program('perl', var='PERL', mandatory=mandatory)
    conf.load('perl')
    path_perl = conf.find_program('perl')
    # record whether the user pointed PERL somewhere other than the one
    # found on PATH
    conf.env.PERL_SPECIFIED = (conf.env.PERL != path_perl)
    conf.check_perl_version(version)

    def read_perl_config_var(cmd):
        # run "perl -MConfig -e <cmd>" and return its stdout as a list
        output = Utils.cmd_output([conf.env.get_flat('PERL'), '-MConfig', '-e', cmd])
        if not isinstance(output, str):
            output = get_string(output)
        return Utils.to_list(output)

    def check_perl_config_var(var):
        conf.start_msg("Checking for perl $Config{%s}:" % var)
        try:
            v = read_perl_config_var('print $Config{%s}' % var)[0]
            conf.end_msg("'%s'" % (v), 'GREEN')
            return v
        except IndexError:
            # empty output: the variable is not set in perl's %Config
            conf.end_msg(False, 'YELLOW')
            return None

    vendor_prefix = check_perl_config_var('vendorprefix')

    perl_arch_install_dir = None
    if vendor_prefix == conf.env.PREFIX:
        perl_arch_install_dir = check_perl_config_var('vendorarch')
    if perl_arch_install_dir is None:
        perl_arch_install_dir = "${LIBDIR}/perl5"
    conf.start_msg("PERL_ARCH_INSTALL_DIR: ")
    conf.end_msg("'%s'" % (perl_arch_install_dir), 'GREEN')
    conf.env.PERL_ARCH_INSTALL_DIR = perl_arch_install_dir

    perl_lib_install_dir = None
    if vendor_prefix == conf.env.PREFIX:
        perl_lib_install_dir = check_perl_config_var('vendorlib')
    if perl_lib_install_dir is None:
        perl_lib_install_dir = "${DATADIR}/perl5"
    conf.start_msg("PERL_LIB_INSTALL_DIR: ")
    conf.end_msg("'%s'" % (perl_lib_install_dir), 'GREEN')
    conf.env.PERL_LIB_INSTALL_DIR = perl_lib_install_dir

    perl_inc = read_perl_config_var('print "@INC"')
    # drop the current directory from perl's include path
    if '.' in perl_inc:
        perl_inc.remove('.')
    conf.start_msg("PERL_INC: ")
    conf.end_msg("%s" % (perl_inc), 'GREEN')
    conf.env.PERL_INC = perl_inc
diff --git a/buildtools/wafsamba/samba_pidl.py b/buildtools/wafsamba/samba_pidl.py
new file mode 100644
index 0000000..72997c8
--- /dev/null
+++ b/buildtools/wafsamba/samba_pidl.py
@@ -0,0 +1,175 @@
+# waf build tool for building IDL files with pidl
+
+import os
+from waflib import Build, Utils
+from waflib.TaskGen import feature, before
+from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE
+
def SAMBA_PIDL(bld, pname, source,
               options='',
               output_dir='.',
               generate_tables=True):
    '''Build a IDL file using pidl.
    This will produce up to 17 output files depending on the options used

    :param pname:           prefix for the generated target name
    :param source:          the .idl file to compile
    :param options:         pidl command-line options; each one maps to a
                            set of generated output files (see options_map)
    :param output_dir:      where the generated files go
    :param generate_tables: when True, remember the generated ndr header
                            so SAMBA_PIDL_TABLES can collect it later
    '''

    bname = source[0:-4] # strip off the .idl suffix
    bname = os.path.basename(bname)
    name = "%s_%s" % (pname, bname.upper())

    if not SET_TARGET_TYPE(bld, name, 'PIDL'):
        # already declared; nothing to do
        return

    bld.SET_BUILD_GROUP('build_source')

    # the output files depend on the options used. Use this dictionary
    # to map between the options and the resulting file names
    options_map = { '--header' : '%s.h',
                    '--ndr-parser' : 'ndr_%s.c ndr_%s.h',
                    '--samba3-ndr-server' : 'srv_%s.c srv_%s.h',
                    '--samba3-ndr-client' : 'cli_%s.c cli_%s.h',
                    '--server' : 'ndr_%s_s.c',
                    '--server-compat' : 'ndr_%s_scompat.c ndr_%s_scompat.h',
                    '--client' : 'ndr_%s_c.c ndr_%s_c.h',
                    '--python' : 'py_%s.c',
                    '--tdr-parser' : 'tdr_%s.c tdr_%s.h',
                    '--dcom-proxy' : '%s_p.c',
                    '--com-header' : 'com_%s.h'
                    }

    table_header_idx = None
    out_files = []
    options_list = TO_LIST(options)

    for o in options_list:
        if o in options_map:
            ofiles = TO_LIST(options_map[o])
            for f in ofiles:
                out_files.append(os.path.join(output_dir, f % bname))
                if f == 'ndr_%s.h':
                    # remember this one for the tables generation
                    table_header_idx = len(out_files) - 1

    # depend on the full pidl sources
    source = TO_LIST(source)
    try:
        pidl_src_nodes = bld.pidl_files_cache
    except AttributeError:
        # scan the pidl implementation once and cache it on the build
        # context for all subsequent SAMBA_PIDL calls
        bld.pidl_files_cache = bld.srcnode.ant_glob('pidl/lib/Parse/**/*.pm', flat=False)
        bld.pidl_files_cache.extend(bld.srcnode.ant_glob('pidl', flat=False))
        pidl_src_nodes = bld.pidl_files_cache

    # the cd .. is needed because pidl currently is sensitive to the directory it is run in
    cpp = ""
    cc = ""
    if bld.CONFIG_SET("CPP") and bld.CONFIG_GET("CPP") != "":
        if isinstance(bld.CONFIG_GET("CPP"), list):
            cpp = 'CPP="%s"' % " ".join(bld.CONFIG_GET("CPP"))
        else:
            cpp = 'CPP="%s"' % bld.CONFIG_GET("CPP")

    if cpp == "CPP=xlc_r":
        # NOTE(review): special-cases the AIX xlc_r preprocessor — confirm
        # this workaround is still needed
        cpp = ""

    if bld.env['PIDL_DEVELOPER_MODE']:
        pidl_dev = 'PIDL_DEVELOPER=1 '
    else:
        pidl_dev = ''

    if bld.CONFIG_SET("CC"):
        if isinstance(bld.CONFIG_GET("CC"), list):
            cc = 'CC="%s"' % " ".join(bld.CONFIG_GET("CC"))
        else:
            cc = 'CC="%s"' % bld.CONFIG_GET("CC")

    t = bld(rule='cd ${PIDL_LAUNCH_DIR} && %s%s %s ${PERL} ${PIDL} --quiet ${OPTIONS} --outputdir ${OUTPUTDIR} -- "${IDLSRC}"' % (pidl_dev, cpp, cc),
            ext_out = '.c',
            before = 'c',
            update_outputs = True,
            shell = True,
            source = source,
            target = out_files,
            name = name,
            samba_type = 'PIDL')


    t.env.PIDL_LAUNCH_DIR = bld.srcnode.path_from(bld.bldnode)
    pnode = bld.srcnode.find_resource('pidl/pidl')
    t.env.PIDL = pnode.path_from(bld.srcnode)
    t.env.OPTIONS = TO_LIST(options)
    # paths in the rule are evaluated relative to the source root
    snode = t.path.find_resource(source[0])
    t.env.IDLSRC = snode.path_from(bld.srcnode)
    t.env.OUTPUTDIR = bld.bldnode.path_from(bld.srcnode) + '/' + bld.path.find_dir(output_dir).path_from(bld.srcnode)

    bld.add_manual_dependency(snode, pidl_src_nodes)

    if generate_tables and table_header_idx is not None:
        pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS')
        pidl_headers[name] = [bld.path.find_or_declare(out_files[table_header_idx])]

    t.more_includes = '#' + bld.path.path_from(bld.srcnode)
Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL
+
def SAMBA_PIDL_LIST(bld, name, source,
                    options='',
                    output_dir='.',
                    generate_tables=True,
                    generate_fuzzers=True):
    '''A wrapper for building a set of IDL files

    Calls SAMBA_PIDL once per .idl file and, when both table and fuzzer
    generation are enabled, registers NDR fuzz targets for each interface.
    '''
    for p in TO_LIST(source):
        bld.SAMBA_PIDL(name, p, options=options, output_dir=output_dir, generate_tables=generate_tables)

        # Some IDL files don't exactly match between name and
        # "interface" so we need a way to skip those, while other IDL
        # files have the table generation skipped entirely, on which
        # the fuzzers rely
        if generate_tables and generate_fuzzers:
            interface = p[0:-4] # strip off the .idl suffix
            bld.SAMBA_NDR_FUZZ(interface,
                               auto_deps=True,
                               fuzz_type="TYPE_STRUCT")

            # Only generate the TYPE_STRUCT fuzzer if this isn't
            # really DCE/RPC
            if '--client' in options:
                bld.SAMBA_NDR_FUZZ(interface,
                                   auto_deps=True,
                                   fuzz_type="TYPE_IN")
                bld.SAMBA_NDR_FUZZ(interface,
                                   auto_deps=True,
                                   fuzz_type="TYPE_OUT")
Build.BuildContext.SAMBA_PIDL_LIST = SAMBA_PIDL_LIST
+
+
+#################################################################
+# the rule for generating the NDR tables
@feature('collect')
@before('exec_rule')
def collect(self):
    '''gather all registered PIDL ndr headers as extra sources

    Appends every header recorded in the PIDL_HEADERS cache to this task
    generator's source list, posting each producing generator first so the
    header nodes exist.  Used by SAMBA_PIDL_TABLES.
    '''
    pidl_headers = LOCAL_CACHE(self.bld, 'PIDL_HEADERS')
    # The first source is tables.pl itself
    self.source = Utils.to_list(self.source)
    for (name, hd) in pidl_headers.items():
        y = self.bld.get_tgen_by_name(name)
        self.bld.ASSERT(y is not None, 'Failed to find PIDL header %s' % name)
        y.post()
        for node in hd:
            self.bld.ASSERT(node is not None, 'Got None as build node generating PIDL table for %s' % name)
            self.source.append(node)
+
+
def SAMBA_PIDL_TABLES(bld, name, target):
    '''generate the pidl NDR tables file

    Runs librpc/tables.pl over all collected ndr headers (gathered by the
    'collect' feature above) and redirects its stdout into *target*.
    '''
    bld.SET_BUILD_GROUP('main')
    t = bld(
        features = 'collect',
        rule = '${PERL} ${SRC} > ${TGT}',
        ext_out = '.c',
        before = 'c',
        update_outputs = True,
        shell = True,
        source = '../../librpc/tables.pl',
        target = target,
        name = name)
    t.env.LIBRPC = os.path.join(bld.srcnode.abspath(), 'librpc')
Build.BuildContext.SAMBA_PIDL_TABLES = SAMBA_PIDL_TABLES
+
diff --git a/buildtools/wafsamba/samba_python.py b/buildtools/wafsamba/samba_python.py
new file mode 100644
index 0000000..12a94c8
--- /dev/null
+++ b/buildtools/wafsamba/samba_python.py
@@ -0,0 +1,157 @@
# waf build tool for building Python extension modules for Samba
+
+import os, sys
+from waflib import Build, Logs, Utils, Configure, Errors
+from waflib.Configure import conf
+
@conf
def SAMBA_CHECK_PYTHON(conf, version=(3,6,0)):
    '''find python3 and verify it meets the minimum version

    Sets PYTHON, PYTHON_SPECIFIED and python_interpreters in conf.env.
    When HAVE_PYTHON_H is already set only the version check is redone.
    '''

    # enable tool to build python extensions
    if conf.env.HAVE_PYTHON_H:
        conf.check_python_version(version)
        return

    interpreters = []

    # python is only mandatory when it has not been explicitly disabled
    conf.find_program('python3', var='PYTHON',
                      mandatory=not conf.env.disable_python)
    conf.load('python')
    path_python = conf.find_program('python3')

    # record whether the user pointed PYTHON somewhere other than the one
    # found on PATH
    conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python)
    conf.check_python_version(version)

    interpreters.append(conf.env['PYTHON'])
    conf.env.python_interpreters = interpreters
+
+
@conf
def SAMBA_CHECK_PYTHON_HEADERS(conf):
    '''check for python development headers (unless python is disabled)

    Caches the (expensive) header check across reconfigure runs via
    conf.env["python_headers_checked"].  In every path the PYTHONDIR /
    PYTHONARCHDIR defines are stripped from conf.env.DEFINES afterwards.
    '''
    if conf.env.disable_python:
        conf.msg("python headers", "Check disabled due to --disable-python")
    elif conf.env["python_headers_checked"] == []:
        # not checked yet in this configure run
        _check_python_headers(conf)
        conf.env["python_headers_checked"] = "yes"
    else:
        conf.msg("python headers", "using cache")

    # we don't want PYTHONDIR in config.h, as otherwise changing
    # --prefix causes a complete rebuild
    # (this filter was previously duplicated in the disabled path and the
    # checked path; hoisted so it runs once for every path)
    conf.env.DEFINES = [x for x in conf.env.DEFINES
                        if not x.startswith('PYTHONDIR=')
                        and not x.startswith('PYTHONARCHDIR=')]
+
def _check_python_headers(conf):
    '''run waf's python header detection and normalise the PYEMBED flags

    Derives PYTHON_SO_ABI_FLAG from the extension filename pattern, moves
    -L entries from LINKFLAGS_PYEMBED into LIBPATH_PYEMBED, and resets the
    PYEXT defines/cflags.
    '''
    conf.check_python_headers()

    # e.g. pyext_PATTERN "%s.cpython-310-x86_64.so" gives the ABI flag
    # ".cpython-310-x86_64" when formatted with the empty string
    abi_pattern = os.path.splitext(conf.env['pyext_PATTERN'])[0]
    conf.env['PYTHON_SO_ABI_FLAG'] = abi_pattern % ''
    conf.env['PYTHON_LIBNAME_SO_ABI_FLAG'] = (
        conf.env['PYTHON_SO_ABI_FLAG'].replace('_', '-'))

    for lib in conf.env['LINKFLAGS_PYEMBED']:
        if lib.startswith('-L'):
            conf.env.append_unique('LIBPATH_PYEMBED', lib[2:]) # strip '-L'
            conf.env['LINKFLAGS_PYEMBED'].remove(lib)

    # same as in waf 1.5, keep only '-fno-strict-aliasing'
    # and ignore defines such as NDEBUG _FORTIFY_SOURCE=2
    conf.env.DEFINES_PYEXT = []
    conf.env.CFLAGS_PYEXT = ['-fno-strict-aliasing']

    return
+
def PYTHON_BUILD_IS_ENABLED(self):
    '''Report whether configure found Python.h (HAVE_PYTHON_H set).'''
    have_python_h = self.CONFIG_SET('HAVE_PYTHON_H')
    return have_python_h
+
+Build.BuildContext.PYTHON_BUILD_IS_ENABLED = PYTHON_BUILD_IS_ENABLED
+
+
def SAMBA_PYTHON(bld, name,
                 source='',
                 deps='',
                 public_deps='',
                 realname=None,
                 cflags='',
                 cflags_end=None,
                 includes='',
                 init_function_sentinel=None,
                 local_include=True,
                 vars=None,
                 install=True,
                 enabled=True):
    '''build a python extension for Samba

    Thin wrapper around SAMBA_LIBRARY that sets the python-specific
    options (pyext, PYTHONARCHDIR install path, PY_SSIZE_T_CLEAN).
    :param realname: the final module filename; also used to create a
                     build-tree link under python/
    '''

    # force-disable when we can't build python modules, so
    # every single call doesn't need to pass this in.
    if not bld.PYTHON_BUILD_IS_ENABLED():
        enabled = False

    # Save time, no need to build python bindings when fuzzing
    if bld.env.enable_fuzzing:
        enabled = False

    # when we support static python modules we'll need to gather
    # the list from all the SAMBA_PYTHON() targets
    if init_function_sentinel is not None:
        cflags += ' -DSTATIC_LIBPYTHON_MODULES=%s' % init_function_sentinel

    # From https://docs.python.org/2/c-api/arg.html:
    # Starting with Python 2.5 the type of the length argument to
    # PyArg_ParseTuple(), PyArg_ParseTupleAndKeywords() and PyArg_Parse()
    # can be controlled by defining the macro PY_SSIZE_T_CLEAN before
    # including Python.h. If the macro is defined, length is a Py_ssize_t
    # rather than an int.

    # Because <Python.h> if often included before includes.h/config.h
    # This must be in the -D compiler options
    cflags += ' -DPY_SSIZE_T_CLEAN=1'

    source = bld.EXPAND_VARIABLES(source, vars=vars)

    if realname is not None:
        link_name = 'python/%s' % realname
    else:
        link_name = None

    bld.SAMBA_LIBRARY(name,
                      source=source,
                      deps=deps,
                      public_deps=public_deps,
                      includes=includes,
                      cflags=cflags,
                      cflags_end=cflags_end,
                      local_include=local_include,
                      vars=vars,
                      realname=realname,
                      link_name=link_name,
                      pyext=True,
                      target_type='PYTHON',
                      install_path='${PYTHONARCHDIR}',
                      # python modules resolve symbols against the
                      # embedding interpreter at runtime
                      allow_undefined_symbols=True,
                      install=install,
                      enabled=enabled)

Build.BuildContext.SAMBA_PYTHON = SAMBA_PYTHON
+
+
def pyembed_libname(bld, name):
    '''Return *name* with the python SO ABI suffix appended, when one
    was detected at configure time; otherwise return *name* unchanged.'''
    abi_flag = bld.env['PYTHON_SO_ABI_FLAG']
    if not abi_flag:
        return name
    return name + abi_flag
+
+Build.BuildContext.pyembed_libname = pyembed_libname
+
+
diff --git a/buildtools/wafsamba/samba_third_party.py b/buildtools/wafsamba/samba_third_party.py
new file mode 100644
index 0000000..5289848
--- /dev/null
+++ b/buildtools/wafsamba/samba_third_party.py
@@ -0,0 +1,48 @@
+# functions to support third party libraries
+
+import os
+from waflib import Utils, Build, Context
+from waflib.Configure import conf
+
@conf
def CHECK_FOR_THIRD_PARTY(conf):
    '''True when a third_party/ directory exists at the top of the source tree'''
    return os.path.exists(os.path.join(Context.g_module.top, 'third_party'))

Build.BuildContext.CHECK_FOR_THIRD_PARTY = CHECK_FOR_THIRD_PARTY
+
@conf
def CHECK_POPT(conf):
    '''check for a usable system popt (function poptGetContext, header popt.h)'''
    return conf.CHECK_BUNDLED_SYSTEM('popt', checkfunctions='poptGetContext', headers='popt.h')

Build.BuildContext.CHECK_POPT = CHECK_POPT
+
@conf
def CHECK_CMOCKA(conf):
    '''check for system cmocka >= 1.1.3 via pkg-config'''
    return conf.CHECK_BUNDLED_SYSTEM_PKG('cmocka', minversion='1.1.3')

Build.BuildContext.CHECK_CMOCKA = CHECK_CMOCKA
+
@conf
def CHECK_SOCKET_WRAPPER(conf):
    '''check for system socket_wrapper >= 1.4.2 via pkg-config'''
    return conf.CHECK_BUNDLED_SYSTEM_PKG('socket_wrapper', minversion='1.4.2')
Build.BuildContext.CHECK_SOCKET_WRAPPER = CHECK_SOCKET_WRAPPER
+
@conf
def CHECK_NSS_WRAPPER(conf):
    '''check for system nss_wrapper >= 1.1.15 via pkg-config'''
    return conf.CHECK_BUNDLED_SYSTEM_PKG('nss_wrapper', minversion='1.1.15')
Build.BuildContext.CHECK_NSS_WRAPPER = CHECK_NSS_WRAPPER
+
@conf
def CHECK_RESOLV_WRAPPER(conf):
    '''check for system resolv_wrapper >= 1.1.8 via pkg-config'''
    return conf.CHECK_BUNDLED_SYSTEM_PKG('resolv_wrapper', minversion='1.1.8')
Build.BuildContext.CHECK_RESOLV_WRAPPER = CHECK_RESOLV_WRAPPER
+
@conf
def CHECK_UID_WRAPPER(conf):
    '''check for system uid_wrapper >= 1.3.0 via pkg-config'''
    return conf.CHECK_BUNDLED_SYSTEM_PKG('uid_wrapper', minversion='1.3.0')
Build.BuildContext.CHECK_UID_WRAPPER = CHECK_UID_WRAPPER
+
@conf
def CHECK_PAM_WRAPPER(conf):
    '''check for system pam_wrapper >= 1.1.4 via pkg-config'''
    return conf.CHECK_BUNDLED_SYSTEM_PKG('pam_wrapper', minversion='1.1.4')
Build.BuildContext.CHECK_PAM_WRAPPER = CHECK_PAM_WRAPPER
diff --git a/buildtools/wafsamba/samba_utils.py b/buildtools/wafsamba/samba_utils.py
new file mode 100644
index 0000000..f287e85
--- /dev/null
+++ b/buildtools/wafsamba/samba_utils.py
@@ -0,0 +1,754 @@
+# a waf tool to add autoconf-like macros to the configure section
+# and for SAMBA_ macros for building libraries, binaries etc
+
+import errno
+import os, sys, re, fnmatch, shlex, inspect
+from optparse import SUPPRESS_HELP
+from waflib import Build, Options, Utils, Task, Logs, Configure, Errors, Context
+from waflib import Scripting
+from waflib.TaskGen import feature, before, after
+from waflib.Configure import ConfigurationContext
+from waflib.Logs import debug
+from waflib import ConfigSet
+from waflib.Build import CACHE_SUFFIX
+
+# TODO: make this a --option
+LIB_PATH="shared"
+
+
PY3 = sys.version_info[0] == 3

if PY3:

    def get_string(bytesorstring):
        '''Return *bytesorstring* as text.

        A 'bytes' value is decoded using UTF-8; a 'str' value is returned
        unchanged; anything else raises ValueError.  Shared PY2/PY3 code can
        call this unconditionally: under PY2 (see below) the value passes
        through unchanged, under PY3 bytes are decoded where needed.
        '''
        if isinstance(bytesorstring, bytes):
            return bytesorstring.decode('utf8')
        if not isinstance(bytesorstring, str):
            # fix: the message used to read "Expected byte of string"
            raise ValueError('Expected bytes or string for %s:%s' % (type(bytesorstring), bytesorstring))
        return bytesorstring

else:

    def get_string(bytesorstring):
        '''Return *bytesorstring* unchanged if it is 'str' or 'unicode'.

        Anything else raises ValueError.  Mirror of the PY3 helper above so
        shared code can call get_string() on either interpreter.
        '''
        if not isinstance(bytesorstring, (str, unicode)):
            raise ValueError('Expected str or unicode for %s:%s' % (type(bytesorstring), bytesorstring))
        return bytesorstring

# sigh, python octal constants are a mess
MODE_644 = int('644', 8)
MODE_744 = int('744', 8)
MODE_755 = int('755', 8)
MODE_777 = int('777', 8)
+
def conf(f):
    # override in order to propagate the argument "mandatory"
    #
    # This replaces waf's own @conf decorator: the wrapped function swallows
    # ConfigurationError when the caller passed mandatory=False, so optional
    # checks can fail without aborting configure.
    def fun(*k, **kw):
        mandatory = True
        if 'mandatory' in kw:
            mandatory = kw['mandatory']
            del kw['mandatory']

        try:
            return f(*k, **kw)
        except Errors.ConfigurationError:
            if mandatory:
                raise

    fun.__name__ = f.__name__
    # if the function already handles 'mandatory' itself, install it unwrapped
    # (detected textually via its source code)
    if 'mandatory' in inspect.getsource(f):
        fun = f

    # expose the check on both the configure and build contexts
    setattr(Configure.ConfigurationContext, f.__name__, fun)
    setattr(Build.BuildContext, f.__name__, fun)
    return f
Configure.conf = conf
Configure.conftest = conf
+
@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''record the declared type of a build target

    A target may only be declared once (an existing 'EMPTY' entry may be
    overwritten); any other redefinition aborts the build.
    '''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache and cache[target] != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s"
                   % (target, ctx.path.abspath(), value, cache[target]))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s"
          % (target, value, ctx.path.abspath()))
    return True
+
+
def GET_TARGET_TYPE(ctx, target):
    '''return the cached type of a target, or None if it was never declared'''
    return LOCAL_CACHE(ctx, 'TARGET_TYPE').get(target)
+
+
def ADD_LD_LIBRARY_PATH(path):
    '''append *path* to LD_LIBRARY_PATH unless it is already present

    Fix: an empty existing value used to be split into [''], producing a
    result with a leading ':'.  An empty LD_LIBRARY_PATH entry makes the
    dynamic loader search the current directory, which is not wanted.
    '''
    oldpath = os.environ.get('LD_LIBRARY_PATH', '')
    entries = oldpath.split(':') if oldpath else []
    if path not in entries:
        entries.append(path)
    os.environ['LD_LIBRARY_PATH'] = ':'.join(entries)
+
+
def needs_private_lib(bld, target):
    '''check whether any library this target links against is a private one'''
    return any(
        getattr(bld.get_tgen_by_name(lib), 'private_library', False)
        for lib in getattr(target, 'final_libs', [])
    )
+
+
def install_rpath(target):
    '''return the rpath entries to embed at install time'''
    bld = target.bld
    bld.env['RPATH'] = []
    paths = set()
    if bld.env.RPATH_ON_INSTALL:
        paths.add(bld.EXPAND_VARIABLES(bld.env.LIBDIR))
    # targets linking private libraries additionally need the private libdir
    if bld.env.RPATH_ON_INSTALL_PRIVATE and needs_private_lib(bld, target):
        paths.add(bld.EXPAND_VARIABLES(bld.env.PRIVATELIBDIR))
    return list(paths)
+
+
def build_rpath(bld):
    '''return the rpath entries used while running from the build tree'''
    rpaths = [os.path.normpath('%s/%s' % (bld.env.BUILD_DIRECTORY, d))
              for d in ("shared", "shared/private")]
    bld.env['RPATH'] = []
    if bld.env.RPATH_ON_BUILD:
        return rpaths
    # rpath disabled during the build: fall back to LD_LIBRARY_PATH
    for rpath in rpaths:
        ADD_LD_LIBRARY_PATH(rpath)
    return []
+
+
@conf
def LOCAL_CACHE(ctx, name):
    '''return a named dictionary stored in the build environment,
    used to keep state between the various wafsamba helpers'''
    if name not in ctx.env:
        ctx.env[name] = {}
    return ctx.env[name]
+
+
@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''store key=value in the named local cache'''
    LOCAL_CACHE(ctx, cachename)[key] = value
+
+
@conf
def ASSERT(ctx, expression, msg):
    '''abort the build with *msg* unless *expression* is true'''
    if expression:
        return
    raise Errors.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT
+
+
def SUBDIR(bld, subdir, list):
    '''return a space separated string of files with *subdir* prepended'''
    names = [os.path.normpath(os.path.join(subdir, l)) for l in TO_LIST(list)]
    # keep the trailing space: callers concatenate the results
    return ''.join(n + ' ' for n in names)
Build.BuildContext.SUBDIR = SUBDIR
+
+
def dict_concat(d1, d2):
    '''merge the entries of d2 into d1 without overwriting existing keys'''
    for key, value in d2.items():
        d1.setdefault(key, value)
+
def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    # register the function under the command name so waf's command
    # dispatcher can find it in the top-level wscript module
    Context.g_module.__dict__[name] = function
    # NOTE(review): this sets the literal attribute 'name' on the options
    # context, not an attribute named after the command; presumably only the
    # g_module registration above matters -- confirm before changing.
    opt.name = function
Options.OptionsContext.ADD_COMMAND = ADD_COMMAND
+
+
@feature('c', 'cc', 'cshlib', 'cprogram')
@before('apply_core','exec_rule')
def process_depends_on(self):
    '''handle the depends_on attribute of build rules

    depends_on lets a rule declare a dependency on the output of a
    source-generation rule: each dependency is posted first and any extra
    include paths it advertises via more_includes are picked up.
    '''
    depends = getattr(self, 'depends_on', None)
    if not depends:
        return
    for name in self.to_list(depends):
        dep = self.bld.get_tgen_by_name(name)
        self.bld.ASSERT(dep is not None, "Failed to find dependency %s of %s" % (name, self.name))
        dep.post()
        extra = getattr(dep, 'more_includes', None)
        if extra:
            self.includes += " " + extra
+
+
def unique_list(seq):
    '''return *seq* with duplicates removed, keeping first-seen order'''
    # dict preserves insertion order (Python 3.7+), so this deduplicates
    # while keeping the original ordering
    return list(dict.fromkeys(seq))
+
+
def TO_LIST(str, delimiter=None):
    '''Split a list, preserving quoted strings and existing lists

    Accepts None (-> []), a list (-> shallow copy) or a string.  Strings are
    split on *delimiter* (whitespace by default); if any element starts with
    a double quote the slower shlex tokenizer is used instead so quoted
    substrings stay intact.

    Fix: empty elements (e.g. produced by a doubled delimiter) used to raise
    IndexError when their first character was inspected.
    '''
    if str is None:
        return []
    if isinstance(str, list):
        # we need to return a new independent list...
        return list(str)
    if len(str) == 0:
        return []
    lst = str.split(delimiter)
    # if any element carries a quote, let shlex do the full parse
    for e in lst:
        if e.startswith('"'):
            return shlex.split(str)
    return lst
+
+
def subst_vars_error(string, env):
    '''expand ${VAR} references in *string*, raising KeyError for unknowns

    List-valued variables are joined with single spaces.
    '''
    pieces = []
    for token in re.split(r'(\$\{\w+\})', string):
        if re.match(r'\$\{\w+\}', token):
            name = token[2:-1]
            if name not in env:
                raise KeyError("Failed to find variable %s in %s in env %s <%s>" % (name, string, env.__class__, str(env)))
            token = env[name]
            if isinstance(token, list):
                token = ' '.join(token)
        pieces.append(token)
    return ''.join(pieces)
+
+
@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    # expand the ${...} references inside env[varname] against the env
    # itself; raises KeyError if a referenced variable is undefined
    return subst_vars_error(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR
+
+
def recursive_dirlist(dir, relbase, pattern=None):
    '''return all files below *dir* as paths relative to *relbase*

    If *pattern* is given, only file names matching it (fnmatch syntax)
    are returned.  Fix: the pattern is now propagated into subdirectories;
    previously it was only applied to files in the top-level directory.
    '''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os.path.relpath(f2, relbase))
    return ret
+
+
def symlink(src, dst, force=True):
    """Create a symlink at *dst* pointing at *src*.

    With force=True (the default) an existing *dst* is replaced.
    """
    try:
        os.symlink(src, dst)
    except OSError as exc:
        if exc.errno != errno.EEXIST or not force:
            raise
        os.remove(dst)
        os.symlink(src, dst)
+
+
def mkdir_p(dir):
    '''like mkdir -p: create *dir* and all missing parent directories'''
    if not dir:
        # empty path is a no-op (recursion terminator)
        return
    if dir.endswith("/"):
        # normalise away a trailing slash so dirname() makes progress
        mkdir_p(dir[:-1])
        return
    if os.path.isdir(dir):
        return
    # create the parent chain first, then this directory
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)
+
+
def SUBST_VARS_RECURSIVE(string, env):
    '''expand ${...} variables repeatedly until none remain

    Expansion is capped at 100 rounds to avoid looping forever on
    self-referencing variables.
    '''
    if string is None:
        return string
    for _ in range(100):
        if '${' not in string:
            break
        string = subst_vars_error(string, env)
    return string
+
+
@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''
    if isinstance(varstr, list):
        return [EXPAND_VARIABLES(ctx, s, vars=vars) for s in varstr]
    if not isinstance(varstr, str):
        return varstr

    ret = varstr
    # first substitute against the user supplied dictionary, if any
    if vars is not None:
        env = ConfigSet.ConfigSet()
        for key in vars.keys():
            env[key] = vars[key]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # anything still unexpanded is looked up in the build environment
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)

    # nothing may remain; also catch the common typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES
+
+
def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run an external command, returning its exit code
    (or the negated signal number when it was killed by a signal)

    note: *shell* is accepted for API compatibility but is not used; the
    command always goes through os.system()
    '''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)
    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return -os.WTERMSIG(status)
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1
+
+
def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
    '''run each test file under every configured python interpreter

    returns 0 on success, otherwise the exit status of the last failure
    '''
    env = LOAD_ENVIRONMENT()
    if pythonpath is None:
        pythonpath = os.path.join(Context.g_module.out, 'python')
    failed = 0
    for interp in env.python_interpreters:
        if not isinstance(interp, str):
            interp = ' '.join(interp)
        for testfile in testfiles:
            cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile)
            # prefix any extra environment variables onto the command line
            for key, value in (extra_env or {}).items():
                cmd = "%s=%s %s" % (key, value, cmd)
            print('Running Python test with %s: %s' % (interp, testfile))
            status = RUN_COMMAND(cmd)
            if status:
                print('Python test failed: %s' % cmd)
                failed = status
    return failed
+
+
# make sure we have md5. some systems don't have it
#
# Fallback ladder: hashlib.md5 -> legacy md5 module -> a hash()-based
# replacement that is wired into waf (Utils/Task) so signatures still work
# on systems where MD5 is unavailable (e.g. FIPS mode).
try:
    from hashlib import md5
    # Even if hashlib.md5 exists, it may be unusable.
    # Try to use MD5 function. In FIPS mode this will cause an exception
    # and we'll get to the replacement code
    foo = md5(b'abcd')
except:
    try:
        import md5
        # repeat the same check here, mere success of import is not enough.
        # Try to use MD5 function. In FIPS mode this will cause an exception
        foo = md5.md5(b'abcd')
    except:
        # no usable MD5 at all: replace waf's hashing with Python's hash()
        Context.SIG_NIL = hash('abcd')
        class replace_md5(object):
            def __init__(self):
                self.val = None
            def update(self, val):
                # fold each chunk into the running value
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                # NOTE(review): str.encode('hex') is a PY2-only codec; this
                # path would fail on PY3 -- presumably never hit there. Confirm.
                return self.digest().encode('hex')
        def replace_h_file(filename):
            # hash a file in 100k chunks, mirroring waf's Utils.h_file
            f = open(filename, 'rb')
            m = replace_md5()
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file
+
+
def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
    from new commands'''
    env = ConfigSet.ConfigSet()
    path = os.path.join(Context.g_module.out, 'c4che/default' + CACHE_SUFFIX)
    try:
        env.load(path)
    except (OSError, IOError):
        # not configured yet: hand back an empty environment
        pass
    return env
+
+
def IS_NEWER(bld, file1, file2):
    '''return True if file1 has a more recent mtime than file2'''
    base = bld.path.abspath()
    t1 = os.stat(os.path.join(base, file1)).st_mtime
    t2 = os.stat(os.path.join(base, file2)).st_mtime
    return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER
+
+
@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level

    Each (context class, directory) pair is processed at most once.
    '''
    try:
        visited_dirs = ctx.visited_dirs
    except AttributeError:
        visited_dirs = ctx.visited_dirs = set()

    # prefer the directory relative to the current wscript; fall back to
    # the top of the source tree
    candidate = os.path.join(ctx.path.abspath(), directory)
    if os.path.exists(candidate):
        abspath = os.path.abspath(candidate)
    else:
        abspath = os.path.abspath(os.path.join(Context.g_module.top, directory))

    ctxclass = ctx.__class__.__name__
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)

    relpath = os.path.relpath(abspath, ctx.path.abspath())
    known_contexts = ('OptionsContext',
                      'ConfigurationContext',
                      'BuildContext',
                      'CleanContext',
                      'InstallContext',
                      'UninstallContext',
                      'ListContext')
    if ctxclass in known_contexts or 'waflib.extras.compat15' in sys.modules:
        return ctx.recurse(relpath)
    raise Errors.WafError('Unknown RECURSE context class: {}'.format(ctxclass))
Options.OptionsContext.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE
+
+
def CHECK_MAKEFLAGS(options):
    '''check for MAKEFLAGS environment variable in case we are being
    called from a Makefile try to honor a few make command line flags'''
    # only act when the wrapper Makefile set WAF_MAKE
    if not 'WAF_MAKE' in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    if makeflags is None:
        makeflags = ""
    jobs_set = False
    jobs = None
    # we need to use shlex.split to cope with the escaping of spaces
    # in makeflags
    for opt in shlex.split(makeflags):
        # options can come either as -x or as x
        if opt[0:2] == 'V=':
            # V=<n> controls waf verbosity/log zones
            options.verbose = Logs.verbose = int(opt[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif opt[0].isupper() and opt.find('=') != -1:
            # this allows us to set waf options on the make command line
            # for example, if you do "make FOO=blah", then we set the
            # option 'FOO' in Options.options, to blah. If you look in wafsamba/wscript
            # you will see that the command line accessible options have their dest=
            # set to uppercase, to allow for passing of options from make in this way
            # this is also how "make test TESTS=testpattern" works, and
            # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1"
            loc = opt.find('=')
            setattr(options, opt[0:loc], opt[loc+1:])
        elif opt[0] != '-':
            # bundled short flags without a dash (make passes e.g. "kj4").
            # NOTE(review): v is a single character here, so the regex can
            # only ever match a bare 'j'; the job count is then recovered by
            # stripping 'j' characters from the whole word -- confirm this
            # is the intended parse for forms like "j4".
            for v in opt:
                if re.search(r'j[0-9]*$', v):
                    jobs_set = True
                    jobs = opt.strip('j')
                elif v == 'k':
                    options.keep = True
        elif re.search(r'-j[0-9]*$', opt):
            # explicit "-j" / "-j<n>" form
            jobs_set = True
            jobs = opt.strip('-j')
        elif opt == '-k':
            options.keep = True
    if not jobs_set:
        # default to one job
        options.jobs = 1
    elif jobs_set and jobs:
        options.jobs = int(jobs)
+
# keep a reference to waf's own implementation so we can wrap it below
waflib_options_parse_cmd_args = Options.OptionsContext.parse_cmd_args
def wafsamba_options_parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
    '''wrapper around waf's parse_cmd_args that honours MAKEFLAGS and
    works around broken threading with a single job'''
    (options, commands, envvars) = \
        waflib_options_parse_cmd_args(self,
                                      _args=_args,
                                      cwd=cwd,
                                      allow_unknown=allow_unknown)
    # apply any make-style flags (V=, -j, -k, FOO=bar) from the environment
    CHECK_MAKEFLAGS(options)
    if options.jobs == 1:
        #
        # waflib.Runner.Parallel processes jobs inline if the possible number
        # of jobs is just 1. But (at least in waf <= 2.0.12) it still calls
        # create a waflib.Runner.Spawner() which creates a single
        # waflib.Runner.Consumer() thread that tries to process jobs from the
        # queue.
        #
        # This has strange effects, which are not noticed typically,
        # but at least on AIX python has broken threading and fails
        # in random ways.
        #
        # So we just add a dummy Spawner class.
        class NoOpSpawner(object):
            def __init__(self, master):
                return
        from waflib import Runner
        Runner.Spawner = NoOpSpawner
    return options, commands, envvars
Options.OptionsContext.parse_cmd_args = wafsamba_options_parse_cmd_args
+
option_groups = {}

def option_group(opt, name):
    '''return the named option group, creating it on first use'''
    global option_groups
    try:
        return option_groups[name]
    except KeyError:
        gr = option_groups[name] = opt.add_option_group(name)
        return gr
Options.OptionsContext.option_group = option_group
+
+
def save_file(filename, contents, create_dir=False):
    '''write *contents* to *filename*, returning True on success

    With create_dir=True any missing parent directories are created first.
    Fixes: the file is now always closed (the original leaked the handle
    when write() failed), and the bare ``except:`` no longer swallows
    KeyboardInterrupt/SystemExit.
    '''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        with open(filename, 'w') as f:
            f.write(contents)
    except Exception:
        return False
    return True
+
+
def load_file(filename):
    '''return the contents of *filename*, or None if it cannot be read

    Fixes: the file handle is now closed even when read() fails (the
    original leaked it on error), and the bare ``except:`` no longer
    swallows KeyboardInterrupt/SystemExit.
    '''
    try:
        with open(filename, 'r') as f:
            return f.read()
    except Exception:
        return None
+
+
def reconfigure(ctx):
    '''rerun configure if necessary'''
    # the lock file is written by a successful configure run; without it
    # there is nothing to re-run
    if not os.path.exists(os.environ.get('WAFLOCK', '.lock-wscript')):
        raise Errors.WafError('configure has not been run')
    # NOTE(review): imported here rather than at module level -- presumably
    # to avoid an import cycle with samba_wildcard; confirm.
    import samba_wildcard
    bld = samba_wildcard.fake_build_environment()
    # ask waf to re-run configure automatically when needed
    Configure.autoconfig = True
    Scripting.check_configured(bld)
+
+
def map_shlib_extension(ctx, name, python=False):
    '''map a filename with a shared library extension of .so to the real shlib name'''
    if name is None:
        return None
    if name[-1:].isdigit():
        # names carrying an explicit trailing version digit are kept as-is
        return name
    root = os.path.splitext(name)[0]
    if python:
        return ctx.env.pyext_PATTERN % root
    ext = os.path.splitext(ctx.env.cshlib_PATTERN)[1]
    return root + ext
Build.BuildContext.map_shlib_extension = map_shlib_extension
+
def apply_pattern(filename, pattern):
    '''apply a filename pattern to a filename that may have a directory component'''
    dirname, basename = os.path.split(filename)
    if not dirname:
        return pattern % basename
    return os.path.join(dirname, pattern % basename)
+
def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
    """make a library filename
       Options:
            nolibprefix: don't include the lib prefix
            version    : add a version number
            python     : if we should use python module name conventions"""
    pattern = ctx.env.pyext_PATTERN if python else ctx.env.cshlib_PATTERN
    libname = apply_pattern(name, pattern)
    if nolibprefix and libname.startswith('lib'):
        libname = libname[3:]
    if version:
        # drop a single leading dot from the version
        if version.startswith('.'):
            version = version[1:]
        root, ext = os.path.splitext(libname)
        if ext == ".dylib":
            # special case - on macOS the version goes before the extension
            libname = "%s.%s%s" % (root, version, ext)
        else:
            libname = "%s%s.%s" % (root, ext, version)
    return libname
Build.BuildContext.make_libname = make_libname
+
+
def get_tgt_list(bld):
    '''return the list of samba task generators for interesting targets'''
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
    wanted = ('SUBSYSTEM', 'BUILTIN', 'MODULE', 'BINARY', 'LIBRARY',
              'PLUGIN', 'ASN1', 'PYTHON')
    tgt_list = []
    for tgt, ttype in targets.items():
        if ttype not in wanted:
            continue
        t = bld.get_tgen_by_name(tgt)
        if t is None:
            Logs.error("Target %s of type %s has no task generator" % (tgt, ttype))
            sys.exit(1)
        tgt_list.append(t)
    return tgt_list
+
from waflib.Context import WSCRIPT_FILE
def PROCESS_SEPARATE_RULE(self, rule):
    ''' cause waf to process additional script based on `rule'.
    You should have file named wscript_<stage>_rule in the current directory
    where stage is either 'configure' or 'build'
    '''
    # pick the stage suffix from the kind of context we are running under
    stage = ''
    if isinstance(self, Configure.ConfigurationContext):
        stage = 'configure'
    elif isinstance(self, Build.BuildContext):
        stage = 'build'
    file_path = os.path.join(self.path.abspath(), WSCRIPT_FILE+'_'+stage+'_'+rule)
    node = self.root.find_node(file_path)
    if node:
        # mirror waf's own recurse() bookkeeping so each script node is
        # executed at most once per context
        try:
            cache = self.recurse_cache
        except AttributeError:
            cache = self.recurse_cache = {}
        if node not in cache:
            cache[node] = True
            # pre/post_recurse maintain self.path around the exec, exactly
            # as waf does when recursing into a wscript
            self.pre_recurse(node)
            try:
                function_code = node.read('r', None)
                exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
            finally:
                self.post_recurse(node)

Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
+
def AD_DC_BUILD_IS_ENABLED(self):
    '''return True when the AD DC build was enabled at configure time'''
    return bool(self.CONFIG_SET('AD_DC_BUILD_IS_ENABLED'))

Build.BuildContext.AD_DC_BUILD_IS_ENABLED = AD_DC_BUILD_IS_ENABLED
+
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def samba_before_apply_obj_vars(self):
    """before apply_obj_vars for uselib, this removes the standard paths

    Fix: the previous code removed entries from RPATH/LIBPATH while
    iterating over those same lists, which skips the element following
    each removal (consecutive standard paths were left in place).
    Filtering into a new list handles all entries correctly.
    """

    def is_standard_libpath(env, path):
        # compare against the normalised standard paths recorded at configure time
        normalized_path = os.path.normpath(path)
        for _path in env.STANDARD_LIBPATH:
            if _path == normalized_path:
                return True
        return False

    v = self.env

    v['RPATH'] = [p for p in v['RPATH'] if not is_standard_libpath(v, p)]
    v['LIBPATH'] = [p for p in v['LIBPATH'] if not is_standard_libpath(v, p)]
+
# Samba options are mostly on by default (administrators and packagers
# specify features to remove, not add), which is why default=True

def samba_add_onoff_option(opt, option, help=(), dest=None, default=True,
                           with_name="with", without_name="without"):
    '''register a --with-X / --without-X option pair for feature X'''
    # render the default for the help text
    if default is None:
        default_str = "auto"
    elif default is True:
        default_str = "yes"
    elif default is False:
        default_str = "no"
    else:
        default_str = str(default)

    if help == ():
        help = ("Build with %s support (default=%s)" % (option, default_str))
    if dest is None:
        dest = "with_%s" % option.replace('-', '_')

    opt.add_option("--%s-%s" % (with_name, option),
                   help=help, action="store_true", dest=dest, default=default)
    # the negative form is hidden from --help output
    opt.add_option("--%s-%s" % (without_name, option),
                   help=SUPPRESS_HELP, action="store_false", dest=dest)
Options.OptionsContext.samba_add_onoff_option = samba_add_onoff_option
diff --git a/buildtools/wafsamba/samba_version.py b/buildtools/wafsamba/samba_version.py
new file mode 100644
index 0000000..31103e0
--- /dev/null
+++ b/buildtools/wafsamba/samba_version.py
@@ -0,0 +1,268 @@
+import os, sys
+from waflib import Utils, Context
+import samba_utils
+from samba_git import find_git
+
def git_version_summary(path, env=None):
    '''return (suffix, fields) describing the git state of the tree at *path*'''
    git = find_git(env)
    if git is None:
        return ("GIT-UNKNOWN", {})

    env.GIT = git

    # point git at the tree explicitly so this works from any cwd
    environ = dict(os.environ)
    environ["GIT_DIR"] = '%s/.git' % path
    environ["GIT_WORK_TREE"] = path
    out = samba_utils.get_string(Utils.cmd_output(
        env.GIT + ' show --pretty=format:"%h%n%ct%n%H%n%cd" --stat HEAD',
        silent=True, env=environ))

    lines = out.splitlines()
    if len(lines) < 4:
        return ("GIT-UNKNOWN", {})

    fields = {
        "GIT_COMMIT_ABBREV": lines[0],
        "GIT_COMMIT_FULLREV": lines[2],
        "COMMIT_TIME": int(lines[1]),
        "COMMIT_DATE": lines[3],
    }
    ret = "GIT-" + fields["GIT_COMMIT_ABBREV"]

    if env.GIT_LOCAL_CHANGES:
        # detect uncommitted changes; a trailing '+' marks a dirty tree
        clean = Utils.cmd_output('%s diff HEAD | wc -l' % env.GIT, silent=True).strip()
        if clean == "0":
            fields["COMMIT_IS_CLEAN"] = 1
        else:
            fields["COMMIT_IS_CLEAN"] = 0
            ret += "+"

    return (ret, fields)
+
+
def distversion_version_summary(path):
    '''parse the .distversion file under *path* and return (suffix, fields)'''
    suffix = None
    fields = {}

    for line in Utils.readf(path + '/.distversion').splitlines():
        # skip blanks and comments
        if not line or line.startswith("#"):
            continue
        try:
            parts = line.split("=")
            if parts[1] != "":
                if parts[0] == "SUFFIX":
                    suffix = parts[1]
                else:
                    fields[parts[0]] = parts[1]
        except:
            print("Failed to parse line %s from .distversion file." % (line))
            raise

    if "COMMIT_TIME" in fields:
        fields["COMMIT_TIME"] = int(fields["COMMIT_TIME"])

    if suffix is None:
        return ("UNKNOWN", fields)
    return (suffix, fields)
+
+
class SambaVersion(object):
    '''Parsed samba version information with the derived version strings.'''

    def __init__(self, version_dict, path, env=None, is_install=True):
        '''Determine the version number of samba

See VERSION for the format. Entries on that file are
also accepted as dictionary entries here
        '''

        # defaults for every field VERSION may define
        self.MAJOR=None
        self.MINOR=None
        self.RELEASE=None
        self.REVISION=None
        self.TP_RELEASE=None
        self.ALPHA_RELEASE=None
        self.BETA_RELEASE=None
        self.PRE_RELEASE=None
        self.RC_RELEASE=None
        self.IS_SNAPSHOT=True
        self.RELEASE_NICKNAME=None
        self.VENDOR_SUFFIX=None
        self.VENDOR_PATCH=None

        # VERSION entries prefixed SAMBA_VERSION_ map to bare attributes
        for a, b in version_dict.items():
            if a.startswith("SAMBA_VERSION_"):
                setattr(self, a[14:], b)
            else:
                setattr(self, a, b)

        if self.IS_GIT_SNAPSHOT == "yes":
            self.IS_SNAPSHOT=True
        elif self.IS_GIT_SNAPSHOT == "no":
            self.IS_SNAPSHOT=False
        else:
            raise Exception("Unknown value for IS_GIT_SNAPSHOT: %s" % self.IS_GIT_SNAPSHOT)

        ##
        ## start with "3.0.22"
        ##
        self.MAJOR=int(self.MAJOR)
        self.MINOR=int(self.MINOR)
        self.RELEASE=int(self.RELEASE)

        SAMBA_VERSION_STRING = ("%u.%u.%u" % (self.MAJOR, self.MINOR, self.RELEASE))

##
## maybe add "3.0.22a" or "4.0.0tp11" or "4.0.0alpha1" or "4.0.0beta1" or "3.0.22pre1" or "3.0.22rc1"
## We do not do pre or rc version on patch/letter releases
##
        if self.REVISION is not None:
            SAMBA_VERSION_STRING += self.REVISION
        if self.TP_RELEASE is not None:
            self.TP_RELEASE = int(self.TP_RELEASE)
            SAMBA_VERSION_STRING += "tp%u" % self.TP_RELEASE
        if self.ALPHA_RELEASE is not None:
            self.ALPHA_RELEASE = int(self.ALPHA_RELEASE)
            SAMBA_VERSION_STRING += ("alpha%u" % self.ALPHA_RELEASE)
        if self.BETA_RELEASE is not None:
            self.BETA_RELEASE = int(self.BETA_RELEASE)
            SAMBA_VERSION_STRING += ("beta%u" % self.BETA_RELEASE)
        if self.PRE_RELEASE is not None:
            self.PRE_RELEASE = int(self.PRE_RELEASE)
            SAMBA_VERSION_STRING += ("pre%u" % self.PRE_RELEASE)
        if self.RC_RELEASE is not None:
            self.RC_RELEASE = int(self.RC_RELEASE)
            SAMBA_VERSION_STRING += ("rc%u" % self.RC_RELEASE)

        # snapshots get a suffix from git, .distversion or a fixed marker
        if self.IS_SNAPSHOT:
            if not is_install:
                suffix = "DEVELOPERBUILD"
                self.vcs_fields = {}
            elif os.path.exists(os.path.join(path, ".git")):
                suffix, self.vcs_fields = git_version_summary(path, env=env)
            elif os.path.exists(os.path.join(path, ".distversion")):
                suffix, self.vcs_fields = distversion_version_summary(path)
            else:
                suffix = "UNKNOWN"
                self.vcs_fields = {}
            self.vcs_fields["SUFFIX"] = suffix
            SAMBA_VERSION_STRING += "-" + suffix
        else:
            self.vcs_fields = {}

        self.OFFICIAL_STRING = SAMBA_VERSION_STRING

        if self.VENDOR_SUFFIX is not None:
            SAMBA_VERSION_STRING += ("-" + self.VENDOR_SUFFIX)
            self.VENDOR_SUFFIX = self.VENDOR_SUFFIX

        if self.VENDOR_PATCH is not None:
            SAMBA_VERSION_STRING += ("-" + self.VENDOR_PATCH)
            self.VENDOR_PATCH = self.VENDOR_PATCH

        self.STRING = SAMBA_VERSION_STRING

        if self.RELEASE_NICKNAME is not None:
            self.STRING_WITH_NICKNAME = "%s (%s)" % (self.STRING, self.RELEASE_NICKNAME)
        else:
            self.STRING_WITH_NICKNAME = self.STRING

    def __str__(self):
        '''render the version information as C preprocessor defines
        (the contents of the generated version.h)'''
        string="/* Autogenerated by waf */\n" +\
               "#define SAMBA_COPYRIGHT_STRING \"%s\"\n" % self.SAMBA_COPYRIGHT_STRING +\
               "#define SAMBA_VERSION_MAJOR %u\n" % self.MAJOR +\
               "#define SAMBA_VERSION_MINOR %u\n" % self.MINOR +\
               "#define SAMBA_VERSION_RELEASE %u\n" % self.RELEASE
        if self.REVISION is not None:
            string+="#define SAMBA_VERSION_REVISION %u\n" % self.REVISION

        if self.TP_RELEASE is not None:
            string+="#define SAMBA_VERSION_TP_RELEASE %u\n" % self.TP_RELEASE

        if self.ALPHA_RELEASE is not None:
            string+="#define SAMBA_VERSION_ALPHA_RELEASE %u\n" % self.ALPHA_RELEASE

        if self.BETA_RELEASE is not None:
            string+="#define SAMBA_VERSION_BETA_RELEASE %u\n" % self.BETA_RELEASE

        if self.PRE_RELEASE is not None:
            string+="#define SAMBA_VERSION_PRE_RELEASE %u\n" % self.PRE_RELEASE

        if self.RC_RELEASE is not None:
            string+="#define SAMBA_VERSION_RC_RELEASE %u\n" % self.RC_RELEASE

        # emit the VCS fields (commit hash, time, date, dirty flag, suffix)
        for name in sorted(self.vcs_fields.keys()):
            string+="#define SAMBA_VERSION_%s " % name
            value = self.vcs_fields[name]
            string_types = str
            if sys.version_info[0] < 3:
                string_types = basestring
            if isinstance(value, string_types):
                string += "\"%s\"" % value
            elif type(value) is int:
                string += "%d" % value
            else:
                raise Exception("Unknown type for %s: %r" % (name, value))
            string += "\n"

        string+="#define SAMBA_VERSION_OFFICIAL_STRING \"" + self.OFFICIAL_STRING + "\"\n"

        if self.VENDOR_SUFFIX is not None:
            string+="#define SAMBA_VERSION_VENDOR_SUFFIX " + self.VENDOR_SUFFIX + "\n"
        if self.VENDOR_PATCH is not None:
            string+="#define SAMBA_VERSION_VENDOR_PATCH " + self.VENDOR_PATCH + "\n"

        if self.RELEASE_NICKNAME is not None:
            string+="#define SAMBA_VERSION_RELEASE_NICKNAME " + self.RELEASE_NICKNAME + "\n"

        # We need to put this #ifdef in to the headers so that vendors can override the version with a function
        string+='''
#ifdef SAMBA_VERSION_VENDOR_FUNCTION
# define SAMBA_VERSION_STRING SAMBA_VERSION_VENDOR_FUNCTION
#else /* SAMBA_VERSION_VENDOR_FUNCTION */
# define SAMBA_VERSION_STRING "''' + self.STRING_WITH_NICKNAME + '''"
#endif
'''
        string+="/* Version for mkrelease.sh: \nSAMBA_VERSION_STRING=" + self.STRING_WITH_NICKNAME + "\n */\n"

        return string
+
+
def samba_version_file(version_file, path, env=None, is_install=True):
    '''Parse the version information from a VERSION file'''
    version_dict = {}
    with open(version_file, 'r') as f:
        for line in f:
            line = line.strip()
            # skip blanks and comments
            if not line or line.startswith("#"):
                continue
            try:
                parts = line.split("=")
                if parts[1] != "":
                    version_dict[parts[0]] = parts[1].strip('"')
            except:
                print("Failed to parse line %s from %s" % (line, version_file))
                raise

    return SambaVersion(version_dict, path, env=env, is_install=is_install)
+
+
+
def load_version(env=None, is_install=True):
    '''load samba versions either from ./VERSION or git
    return a version object for detailed breakdown'''
    env = env or samba_utils.LOAD_ENVIRONMENT()
    version = samba_version_file("./VERSION", ".", env, is_install=is_install)
    # publish the plain version string where waf expects it
    Context.g_module.VERSION = version.STRING
    return version
diff --git a/buildtools/wafsamba/samba_waf18.py b/buildtools/wafsamba/samba_waf18.py
new file mode 100644
index 0000000..54444b3
--- /dev/null
+++ b/buildtools/wafsamba/samba_waf18.py
@@ -0,0 +1,433 @@
+# compatibility layer for building with more recent waf versions
+
+import os, shlex, sys
+from waflib import Build, Configure, Node, Utils, Options, Logs, TaskGen
+from waflib import ConfigSet
+from waflib.TaskGen import feature, after
+from waflib.Configure import conf, ConfigurationContext
+
+from waflib.Tools.flex import decide_ext
+
# This version of flexfun runs in tsk.get_cwd() as opposed to the
# bld.variant_dir: since input paths adjusted against tsk.get_cwd(), we have to
# use tsk.get_cwd() for the work directory as well.
def flexfun(tsk):
    # Build the flex command line from FLEX/FLEXFLAGS plus the task's
    # inputs, run it, and write the captured stdout to the output node.
    env = tsk.env
    bld = tsk.generator.bld
    def to_list(xx):
        # accept either a plain string or an already-split list from the env
        if isinstance(xx, str):
            return [xx]
        return xx
    tsk.last_cmd = lst = []
    lst.extend(to_list(env.FLEX))
    lst.extend(to_list(env.FLEXFLAGS))
    inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
    if env.FLEX_MSYS:
        # MSYS flex expects forward slashes in its input paths
        inputs = [x.replace(os.sep, '/') for x in inputs]
    lst.extend(inputs)
    lst = [x for x in lst if x]
    txt = bld.cmd_and_log(lst, cwd=tsk.get_cwd(), env=env.env or None, quiet=0)
    tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207
+
# register the .l -> generated-source chain using the flexfun above
TaskGen.declare_chain(
    name = 'flex',
    rule = flexfun, # issue #854
    ext_in = '.l',
    decider = decide_ext,
)

# samba always builds in a single variant named 'default'
Build.BuildContext.variant = 'default'
Build.CleanContext.variant = 'default'
Build.InstallContext.variant = 'default'
Build.UninstallContext.variant = 'default'
Build.ListContext.variant = 'default'
+
def abspath(self, env=None):
    # compatibility shim for waf 1.5 style abspath(env) calls: with an
    # env argument, directory nodes resolve to their build-tree path
    if env and hasattr(self, 'children'):
        return self.get_bld().abspath()
    return self.old_abspath()
Node.Node.old_abspath = Node.Node.abspath
Node.Node.abspath = abspath
+
def bldpath(self, env=None):
    # compatibility shim: return an absolute path where waf 1.5 code
    # expected a build-relative one
    return self.abspath()
    #return self.path_from(self.ctx.bldnode.parent)
Node.Node.bldpath = bldpath
+
def srcpath(self, env=None):
    # compatibility shim: return an absolute path where waf 1.5 code
    # expected a source-relative one
    return self.abspath()
    #return self.path_from(self.ctx.bldnode.parent)
Node.Node.srcpath = srcpath
+
def store_fast(self, filename):
    '''Serialize the merged ConfigSet contents to *filename* as a pickle.

    Uses the highest pickle protocol (-1). The context manager replaces
    the manual try/finally so the handle is closed even if dumping fails.
    '''
    data = self.get_merged_dict()
    with open(filename, 'wb') as file:
        Build.cPickle.dump(data, file, -1)
ConfigSet.ConfigSet.store_fast = store_fast
+
def load_fast(self, filename):
    '''Load pickled data from *filename* and merge it into this ConfigSet.

    Counterpart of store_fast(); existing keys are updated in place.
    The context manager replaces the manual try/finally close.
    '''
    with open(filename, 'rb') as file:
        data = Build.cPickle.load(file)
    self.table.update(data)
ConfigSet.ConfigSet.load_fast = load_fast
+
@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
@after('propagate_uselib_vars', 'process_source')
def apply_incpaths(self):
    # resolve this task generator's include directories into nodes and
    # record them relative to the launch directory (bld.cwdx when set)
    lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
    self.includes_nodes = lst
    cwdx = getattr(self.bld, 'cwdx', self.bld.bldnode)
    self.env['INCPATHS'] = [x.path_from(cwdx) for x in lst]
+
@conf
def define(self, key, val, quote=True, comment=None):
    '''add a preprocessor define to the configuration

    key: macro name (non-empty string)
    val: macro value; None becomes an empty tuple, booleans become ints
    quote: when True, non-numeric values are wrapped in double quotes
    comment: accepted for API compatibility, unused here
    '''
    assert key and isinstance(key, str)

    if val is None:
        val = ()
    elif isinstance(val, bool):
        val = int(val)

    # waf 1.5
    self.env[key] = val

    if isinstance(val, int) or isinstance(val, float):
        s = '%s=%s'
    else:
        s = quote and '%s="%s"' or '%s=%s'
    app = s % (key, str(val))

    # replace an existing define for the same key in place, otherwise append
    ban = key + '='
    lst = self.env.DEFINES
    for x in lst:
        if x.startswith(ban):
            lst[lst.index(x)] = app
            break
    else:
        self.env.append_value('DEFINES', app)

    self.env.append_unique('define_key', key)
+
# compat15 removes this but we want to keep it
@conf
def undefine(self, key, from_env=True, comment=None):
    '''remove a preprocessor define from the configuration

    The key is still appended to define_key so the generated config
    header knows to emit an entry for it.
    '''
    assert key and isinstance(key, str)

    ban = key + '='
    self.env.DEFINES = [x for x in self.env.DEFINES if not x.startswith(ban)]
    self.env.append_unique('define_key', key)
    # waf 1.5
    if from_env:
        self.env[key] = ()
+
class ConfigurationContext(Configure.ConfigurationContext):
    '''configuration context that forces the single 'default' variant
    and merged config headers, matching old wafsamba behaviour'''
    def init_dirs(self):
        self.setenv('default')
        self.env.merge_config_header = True
        return super(ConfigurationContext, self).init_dirs()
+
def find_program_samba(self, *k, **kw):
    # Override the waf default set in the @conf decorator in Configure.py
    # samba wants find_program() to be non-fatal unless a caller
    # explicitly passes mandatory=True
    if 'mandatory' not in kw:
        kw['mandatory'] = False
    ret = self.find_program_old(*k, **kw)
    return ret
Configure.ConfigurationContext.find_program_old = Configure.ConfigurationContext.find_program
Configure.ConfigurationContext.find_program = find_program_samba
+
# stub out old wafsamba entry points that are unnecessary with modern
# waf (AUTOCLEANUP_STALE_FILES has a real implementation in
# stale_files.py when that module is loaded)
Build.BuildContext.ENFORCE_GROUP_ORDERING = Utils.nada
Build.BuildContext.AUTOCLEANUP_STALE_FILES = Utils.nada
+
@conf
def check(self, *k, **kw):
    '''Override the waf defaults to inject --with-directory options'''

    # match the configuration test with specific options, for example:
    # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"
    self.validate_c(kw)

    additional_dirs = []
    if 'msg' in kw:
        msg = kw['msg']
        for x in Options.OptionsContext.parser.parser.option_list:
            if getattr(x, 'match', None) and msg in x.match:
                d = getattr(Options.options, x.dest, '')
                if d:
                    additional_dirs.append(d)

    # we add the additional dirs twice: once for the test data, and again if the compilation test succeeds below
    def add_options_dir(dirs, env):
        # prepend <dir>/include and <dir>/lib for each user-supplied prefix
        for x in dirs:
            if not x in env.CPPPATH:
                env.CPPPATH = [os.path.join(x, 'include')] + env.CPPPATH
            if not x in env.LIBPATH:
                env.LIBPATH = [os.path.join(x, 'lib')] + env.LIBPATH

    add_options_dir(additional_dirs, kw['env'])

    self.start_msg(kw['msg'], **kw)
    ret = None
    try:
        ret = self.run_build(*k, **kw)
    except self.errors.ConfigurationError:
        self.end_msg(kw['errmsg'], 'YELLOW', **kw)
        if Logs.verbose > 1:
            # in verbose mode let the caller see the full failure
            raise
        else:
            self.fatal('The configuration failed')
    else:
        kw['success'] = ret
        # success! time for brandy
        add_options_dir(additional_dirs, self.env)

    ret = self.post_check(*k, **kw)
    if not ret:
        self.end_msg(kw['errmsg'], 'YELLOW', **kw)
        self.fatal('The configuration failed %r' % ret)
    else:
        self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
    return ret
+
@conf
def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None):
    '''see if the platform supports building libraries

    Builds a one-function shared library plus a program linked against
    it inside a throw-away configure test, optionally exercising rpath
    and linker version scripts, then runs the program.
    Returns True on success, False on any configuration failure.
    '''

    if msg is None:
        if rpath:
            msg = "rpath library support"
        else:
            msg = "building library support"

    def build(bld):
        # a tiny shared library...
        lib_node = bld.srcnode.make_node('libdir/liblc1.c')
        lib_node.parent.mkdir()
        lib_node.write('int lib_func(void) { return 42; }\n', 'w')
        # ...and a program that calls into it
        main_node = bld.srcnode.make_node('main.c')
        main_node.write('int lib_func(void);\n'
                        'int main(void) {return !(lib_func() == 42);}', 'w')
        linkflags = []
        if version_script:
            script = bld.srcnode.make_node('ldscript')
            script.write('TEST_1.0A2 { global: *; };\n', 'w')
            linkflags.append('-Wl,--version-script=%s' % script.abspath())
        bld(features='c cshlib', source=lib_node, target='lib1', linkflags=linkflags, name='lib1')
        o = bld(features='c cprogram', source=main_node, target='prog1', uselib_local='lib1')
        if rpath:
            o.rpath = [lib_node.parent.abspath()]
        def run_app(self):
            # execute the freshly built program; SAMBA_CROSS_ARGS allows
            # running under an emulator when cross compiling
            args = conf.SAMBA_CROSS_ARGS(msg=msg)
            env = dict(os.environ)
            env['LD_LIBRARY_PATH'] = self.inputs[0].parent.abspath() + os.pathsep + env.get('LD_LIBRARY_PATH', '')
            self.generator.bld.cmd_and_log([self.inputs[0].abspath()] + args, env=env)
        o.post()
        bld(rule=run_app, source=o.link_task.outputs[0])

    # ok, so it builds
    try:
        conf.check(build_fun=build, msg='Checking for %s' % msg)
    except conf.errors.ConfigurationError:
        return False
    return True
+
@conf
def CHECK_NEED_LC(conf, msg):
    '''check if we need -lc

    Tries to build a shared library that calls libc's fopen() without
    linking -lc explicitly. Returns True if the link succeeds without
    it, False if -lc is required.
    '''
    def build(bld):
        lib_node = bld.srcnode.make_node('libdir/liblc1.c')
        lib_node.parent.mkdir()
        lib_node.write('#include <stdio.h>\nint lib_func(void) { FILE *f = fopen("foo", "r");}\n', 'w')
        bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
    try:
        conf.check(build_fun=build, msg=msg, okmsg='-lc is unnecessary', errmsg='-lc is necessary')
    except conf.errors.ConfigurationError:
        return False
    return True
+
# already implemented on "waf -v"
def order(bld, tgt_list):
    '''no-op replacement: waf itself verifies build group ordering when
    run with -v, so this check always reports success'''
    return True
Build.BuildContext.check_group_ordering = order
+
@conf
def CHECK_CFG(self, *k, **kw):
    '''thin wrapper around waf's check_cfg(): tokenize string args,
    default to non-mandatory, and always register a global define'''
    args = kw.get('args')
    if args is not None:
        kw['args'] = shlex.split(args)
    kw.setdefault('mandatory', False)
    kw['global_define'] = True
    return self.check_cfg(*k, **kw)
+
def cmd_output(cmd, **kw):
    '''run an external command and return its captured stdout

    Extra keyword arguments understood here: "silent" suppresses the
    failure exception (and captures stderr too); "e" is an alias for
    the subprocess "env" argument. Raises ValueError on failure.
    '''
    silent = kw.pop('silent', False)

    if 'e' in kw:
        kw['env'] = kw.pop('e')

    # string commands run through the shell, list commands do not
    kw['shell'] = isinstance(cmd, str)
    kw['stdout'] = Utils.subprocess.PIPE
    if silent:
        kw['stderr'] = Utils.subprocess.PIPE

    try:
        proc = Utils.subprocess.Popen(cmd, **kw)
        output = proc.communicate()[0]
    except OSError as e:
        raise ValueError(str(e))

    if proc.returncode:
        if not silent:
            raise ValueError("command execution failed: %s -> %r" % (cmd, str(output)))
        output = ''
    return output
Utils.cmd_output = cmd_output
+
+
@TaskGen.feature('c', 'cxx', 'd')
@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
@TaskGen.after('apply_link', 'process_source')
def apply_uselib_local(self):
    """
    process the uselib_local attribute
    execute after apply_link because of the execution order set on 'link_task'

    Walks the transitive closure of 'uselib_local' dependencies,
    appending LIB/STLIB names, link paths, task ordering constraints and
    exported includes for each visited target.
    """
    env = self.env
    from waflib.Tools.ccroot import stlink_task

    # 1. the case of the libs defined in the project (visit ancestors first)
    # the ancestors external libraries (uselib) will be prepended
    self.uselib = self.to_list(getattr(self, 'uselib', []))
    self.includes = self.to_list(getattr(self, 'includes', []))
    names = self.to_list(getattr(self, 'uselib_local', []))
    get = self.bld.get_tgen_by_name
    seen = set()
    seen_uselib = set()
    tmp = Utils.deque(names) # consume a copy of the list of names
    if tmp:
        if Logs.verbose:
            Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
    while tmp:
        lib_name = tmp.popleft()
        # visit dependencies only once
        if lib_name in seen:
            continue

        y = get(lib_name)
        y.post()
        seen.add(lib_name)

        # object has ancestors to process (shared libraries): add them to the end of the list
        if getattr(y, 'uselib_local', None):
            for x in self.to_list(getattr(y, 'uselib_local', [])):
                obj = get(x)
                obj.post()
                if getattr(obj, 'link_task', None):
                    if not isinstance(obj.link_task, stlink_task):
                        tmp.append(x)

        # link task and flags
        if getattr(y, 'link_task', None):

            link_name = y.target[y.target.rfind(os.sep) + 1:]
            if isinstance(y.link_task, stlink_task):
                env.append_value('STLIB', [link_name])
            else:
                # some linkers can link against programs
                env.append_value('LIB', [link_name])

            # the order
            self.link_task.set_run_after(y.link_task)

            # for the recompilation
            self.link_task.dep_nodes += y.link_task.outputs

            # add the link path too
            tmp_path = y.link_task.outputs[0].parent.bldpath()
            if not tmp_path in env['LIBPATH']:
                env.prepend_value('LIBPATH', [tmp_path])

        # add ancestors uselib too - but only propagate those that have no staticlib defined
        for v in self.to_list(getattr(y, 'uselib', [])):
            if v not in seen_uselib:
                seen_uselib.add(v)
                if not env['STLIB_' + v]:
                    if not v in self.uselib:
                        self.uselib.insert(0, v)

        # if the library task generator provides 'export_includes', add to the include path
        # the export_includes must be a list of paths relative to the other library
        if getattr(y, 'export_includes', None):
            self.includes.extend(y.to_incnodes(y.export_includes))
+
@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
@TaskGen.after('apply_link')
def apply_objdeps(self):
    "add the .o files produced by some other object files in the same manner as uselib_local"
    names = getattr(self, 'add_objects', [])
    if not names:
        return
    names = self.to_list(names)

    get = self.bld.get_tgen_by_name
    seen = []
    # depth-first walk: ancestors are pushed to the front of the work
    # list so their object files are linked before their dependents
    while names:
        x = names[0]

        # visit dependencies only once
        if x in seen:
            names = names[1:]
            continue

        # object does not exist ?
        y = get(x)

        # object has ancestors to process first ? update the list of names
        if getattr(y, 'add_objects', None):
            added = 0
            lst = y.to_list(y.add_objects)
            lst.reverse()
            for u in lst:
                if u in seen:
                    continue
                added = 1
                names = [u]+names
            if added:
                continue # list of names modified, loop

        # safe to process the current object
        y.post()
        seen.append(x)

        # pull every compiled object of the dependency into our link
        for t in getattr(y, 'compiled_tasks', []):
            self.link_task.inputs.extend(t.outputs)
+
@TaskGen.after('apply_link')
def process_obj_files(self):
    # link any pre-built object files registered via add_obj_file()
    if not hasattr(self, 'obj_files'):
        return
    for x in self.obj_files:
        node = self.path.find_resource(x)
        self.link_task.inputs.append(node)
+
@TaskGen.taskgen_method
def add_obj_file(self, file):
    """Small example on how to link object files as if they were source
    obj = bld.create_obj('cc')
    obj.add_obj_file('foo.o')"""
    # lazily create the obj_files list and register the processing method
    files = getattr(self, 'obj_files', None)
    if files is None:
        files = self.obj_files = []
    if 'process_obj_files' not in self.meths:
        self.meths.append('process_obj_files')
    files.append(file)
diff --git a/buildtools/wafsamba/samba_wildcard.py b/buildtools/wafsamba/samba_wildcard.py
new file mode 100644
index 0000000..1ea2803
--- /dev/null
+++ b/buildtools/wafsamba/samba_wildcard.py
@@ -0,0 +1,151 @@
+# based on playground/evil in the waf svn tree
+
+import os, datetime, fnmatch
+from waflib import Scripting, Utils, Options, Logs, Errors
+from waflib import ConfigSet, Context
+from samba_utils import LOCAL_CACHE
+
def run_task(t, k):
    '''run a single build task, raising WafError on a non-zero result'''
    status = t.run()
    if status:
        raise Errors.WafError("Failed to build %s: %u" % (k, status))
+
+
def run_named_build_task(cmd):
    '''run a named build task, matching the cmd name using fnmatch
    wildcards against inputs and outputs of all build tasks

    Raises WafError if no task's input or output matches the pattern.
    '''
    bld = fake_build_environment(info=False)
    found = False
    cwd_node = bld.root.find_dir(os.getcwd())
    top_node = bld.root.find_dir(bld.srcnode.abspath())

    cmd = os.path.normpath(cmd)

    # cope with builds of bin/*/*
    if os.path.islink(cmd):
        cmd = os.path.relpath(os.readlink(cmd), os.getcwd())

    if cmd[0:12] == "bin/default/":
        cmd = cmd[12:]

    # try the pattern against paths relative both to the current
    # directory and to the top of the source tree
    for g in bld.task_manager.groups:
        for attr in ['outputs', 'inputs']:
            for t in g.tasks:
                s = getattr(t, attr, [])
                for k in s:
                    relpath1 = k.relpath_gen(cwd_node)
                    relpath2 = k.relpath_gen(top_node)
                    if (fnmatch.fnmatch(relpath1, cmd) or
                        fnmatch.fnmatch(relpath2, cmd)):
                        t.position = [0,0]
                        print(t.display())
                        run_task(t, k)
                        found = True


    if not found:
        raise Errors.WafError("Unable to find build target matching %s" % cmd)
+
+
def rewrite_compile_targets():
    '''cope with the bin/ form of compile target

    Translates symlinked bin/ paths in --targets back into the target
    names known to the TARGET_TYPE cache.
    '''
    if not Options.options.compile_targets:
        return

    bld = fake_build_environment(info=False)
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
    tlist = []

    for t in Options.options.compile_targets.split(','):
        if not os.path.islink(t):
            tlist.append(t)
            continue
        link = os.readlink(t)
        # NOTE(review): 'list' shadows the builtin of the same name
        list = link.split('/')
        # try the basename first, then the last two path components
        for name in [list[-1], '/'.join(list[-2:])]:
            if name in targets:
                tlist.append(name)
                # NOTE(review): this continues the inner loop, so both
                # name forms can be appended if both are known targets;
                # 'break' may have been intended — confirm before changing
                continue
    Options.options.compile_targets = ",".join(tlist)
+
+
+
def wildcard_main(missing_cmd_fn):
    '''this replaces main from Scripting, allowing us to override the
    behaviour for unknown commands

    If a unknown command is found, then missing_cmd_fn() is called with
    the name of the requested command
    '''
    Scripting.commands = Options.arg_line[:]

    # rewrite the compile targets to cope with the bin/xx form
    rewrite_compile_targets()

    while Scripting.commands:
        x = Scripting.commands.pop(0)

        ini = datetime.datetime.now()
        if x == 'configure':
            fun = Scripting.configure
        elif x == 'build':
            fun = Scripting.build
        else:
            # user-defined commands come from the top-level wscript module
            fun = getattr(Utils.g_module, x, None)

        # this is the new addition on top of main from Scripting.py
        if not fun:
            missing_cmd_fn(x)
            break

        ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()

        if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
            # these commands historically take no context argument
            try:
                fun(ctx)
            except TypeError:
                fun()
        else:
            fun(ctx)

        ela = ''
        if not Options.options.progress_bar:
            ela = ' (%s)' % Utils.get_elapsed_time(ini)

        if x != 'init' and x != 'shutdown':
            Logs.info('%r finished successfully%s' % (x, ela))

        if not Scripting.commands and x != 'shutdown':
            Scripting.commands.append('shutdown')
+
+
+
+
def fake_build_environment(info=True, flush=False):
    """create all the tasks for the project, but do not run the build
    return the build context in use"""
    bld = getattr(Context.g_module, 'build_context', Utils.Context)()
    bld = Scripting.check_configured(bld)

    # make sure nothing gets installed/uninstalled as a side effect
    Options.commands['install'] = False
    Options.commands['uninstall'] = False

    bld.is_install = 0 # False

    try:
        # loaded only to verify the project is configured; the parsed
        # ConfigSet itself is not used further
        proj = ConfigSet.ConfigSet(Options.lockfile)
    except IOError:
        raise Errors.WafError("Project not configured (run './configure' first)")

    bld.load_envs()

    if info:
        Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
    bld.add_subdirs([os.path.split(Context.g_module.root_path)[0]])

    bld.pre_build()
    if flush:
        bld.flush()
    return bld
+
diff --git a/buildtools/wafsamba/stale_files.py b/buildtools/wafsamba/stale_files.py
new file mode 100644
index 0000000..ac2c2a7
--- /dev/null
+++ b/buildtools/wafsamba/stale_files.py
@@ -0,0 +1,114 @@
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Add a pre-build hook to remove all build files
+which do not have a corresponding target
+
+This can be used for example to remove the targets
+that have changed name without performing
+a full 'waf clean'
+
+Of course, it will only work if there are no dynamically generated
+nodes/tasks, in which case the method will have to be modified
+to exclude some folders for example.
+"""
+
+from waflib import Logs, Build, Options, Utils, Errors
+import os
+from wafsamba import samba_utils
+from Runner import Parallel
+
old_refill_task_list = Parallel.refill_task_list
def replace_refill_task_list(self):
    '''replacement for refill_task_list() that deletes stale files

    After delegating to the original refill_task_list(), walk the bin/
    directory and remove generated files that no current build rule
    produces. Only runs once per build, only when the rules changed,
    and never when --targets is in use.
    '''

    iit = old_refill_task_list(self)
    bld = self.bld

    if not getattr(bld, 'new_rules', False):
        # we only need to check for stale files if the build rules changed
        return iit

    if Options.options.compile_targets:
        # not safe when --target is used
        return iit

    # execute only once
    if getattr(self, 'cleanup_done', False):
        return iit
    self.cleanup_done = True

    def group_name(g):
        '''map a task group object back to its registered name'''
        tm = self.bld.task_manager
        return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]

    bin_base = bld.bldnode.abspath()
    bin_base_len = len(bin_base)

    # paranoia
    if bin_base[-4:] != '/bin':
        raise Errors.WafError("Invalid bin base: %s" % bin_base)

    # obtain the expected list of files
    expected = []
    for i in range(len(bld.task_manager.groups)):
        g = bld.task_manager.groups[i]
        tasks = g.tasks_gen
        for x in tasks:
            try:
                if getattr(x, 'target'):
                    tlist = samba_utils.TO_LIST(getattr(x, 'target'))
                    ttype = getattr(x, 'samba_type', None)
                    task_list = getattr(x, 'compiled_tasks', [])
                    if task_list:
                        # this gets all of the .o files, including the task
                        # ids, so foo.c maps to foo_3.o for idx=3
                        for tsk in task_list:
                            for output in tsk.outputs:
                                objpath = os.path.normpath(output.abspath(bld.env))
                                expected.append(objpath)
                    for t in tlist:
                        if ttype in ['LIBRARY', 'PLUGIN', 'MODULE']:
                            t = samba_utils.apply_pattern(t, bld.env.shlib_PATTERN)
                        if ttype == 'PYTHON':
                            t = samba_utils.apply_pattern(t, bld.env.pyext_PATTERN)
                        p = os.path.join(x.path.abspath(bld.env), t)
                        p = os.path.normpath(p)
                        expected.append(p)
                    for n in x.allnodes:
                        p = n.abspath(bld.env)
                        if p[0:bin_base_len] == bin_base:
                            expected.append(p)
            except Exception:
                # best effort: task generators lacking expected attributes
                # are skipped (was a bare 'except:', which also swallowed
                # KeyboardInterrupt/SystemExit)
                pass

    # anything under bin/ with a generated-looking extension that is not
    # in the expected list gets removed
    for root, dirs, files in os.walk(bin_base):
        for f in files:
            p = root + '/' + f
            if os.path.islink(p):
                link = os.readlink(p)
                if link[0:bin_base_len] == bin_base:
                    p = link
            if f in ['config.h']:
                continue
            (froot, fext) = os.path.splitext(f)
            if fext not in [ '.c', '.h', '.so', '.o' ]:
                continue
            if f[-7:] == '.inst.h':
                continue
            if p.find("/.conf") != -1:
                continue
            if not p in expected and os.path.exists(p):
                Logs.warn("Removing stale file: %s" % p)
                os.unlink(p)
    return iit
+
+
def AUTOCLEANUP_STALE_FILES(bld):
    """automatically clean up any files in bin that shouldn't be there"""
    global old_refill_task_list
    # hook the scheduler so the cleanup runs when tasks are refilled
    old_refill_task_list = Parallel.refill_task_list
    Parallel.refill_task_list = replace_refill_task_list
    Parallel.bld = bld
Build.BuildContext.AUTOCLEANUP_STALE_FILES = AUTOCLEANUP_STALE_FILES
diff --git a/buildtools/wafsamba/symbols.py b/buildtools/wafsamba/symbols.py
new file mode 100644
index 0000000..99e121c
--- /dev/null
+++ b/buildtools/wafsamba/symbols.py
@@ -0,0 +1,659 @@
+# a waf tool to extract symbols from object files or libraries
+# using nm, producing a set of exposed defined/undefined symbols
+
+import os, re, subprocess
+from waflib import Utils, Build, Options, Logs, Errors
+from waflib.Logs import debug
+from samba_utils import TO_LIST, LOCAL_CACHE, get_tgt_list
+
+# these are the data structures used in symbols.py:
+#
+# bld.env.symbol_map : dictionary mapping public symbol names to list of
+# subsystem names where that symbol exists
+#
+# t.in_library : list of libraries that t is in
+#
+# bld.env.public_symbols: set of public symbols for each subsystem
+# bld.env.used_symbols : set of used symbols for each subsystem
+#
+# bld.env.syslib_symbols: dictionary mapping system library name to set of symbols
+# for that library
+# bld.env.library_dict : dictionary mapping built library paths to subsystem names
+#
+# LOCAL_CACHE(bld, 'TARGET_TYPE') : dictionary mapping subsystem name to target type
+
+
def symbols_extract(bld, objfiles, dynamic=False):
    '''extract symbols from objfile, returning a dictionary containing
    the set of undefined and public symbols for each file

    Results are cached in bld.env.nm_cache. Keys and symbol names are
    bytes, as produced by the nm subprocess pipe.
    '''

    ret = {}

    # see if we can get some results from the nm cache
    if not bld.env.nm_cache:
        bld.env.nm_cache = {}

    objfiles = set(objfiles).copy()

    remaining = set()
    for obj in objfiles:
        if obj in bld.env.nm_cache:
            ret[obj] = bld.env.nm_cache[obj].copy()
        else:
            remaining.add(obj)
    objfiles = remaining

    if len(objfiles) == 0:
        return ret

    cmd = ["nm"]
    if dynamic:
        # needed for some .so files
        cmd.append("-D")
    cmd.extend(list(objfiles))

    nmpipe = subprocess.Popen(cmd, stdout=subprocess.PIPE).stdout
    if len(objfiles) == 1:
        # with a single input nm prints no "file:" header line
        filename = list(objfiles)[0]
        ret[filename] = { "PUBLIC": set(), "UNDEFINED" : set()}

    for line in nmpipe:
        line = line.strip()
        if line.endswith(b':'):
            # "file:" header introduces the symbols of the next object
            filename = line[:-1]
            ret[filename] = { "PUBLIC": set(), "UNDEFINED" : set() }
            continue
        cols = line.split(b" ")
        if cols == [b'']:
            continue
        # see if the line starts with an address
        if len(cols) == 3:
            symbol_type = cols[1]
            symbol = cols[2]
        else:
            symbol_type = cols[0]
            symbol = cols[1]
        if symbol_type in b"BDGTRVWSi":
            # its a public symbol
            ret[filename]["PUBLIC"].add(symbol)
        elif symbol_type in b"U":
            ret[filename]["UNDEFINED"].add(symbol)

    # add to the cache
    for obj in objfiles:
        if obj in ret:
            bld.env.nm_cache[obj] = ret[obj].copy()
        else:
            # remember objects with no symbols too, to avoid re-running nm
            bld.env.nm_cache[obj] = { "PUBLIC": set(), "UNDEFINED" : set() }

    return ret
+
+
def real_name(name):
    '''strip a trailing ".objlist" from a target name, mapping an
    intermediate object-list target back to its real subsystem name'''
    suffix = ".objlist"
    # only strip an actual suffix: the previous find() test chopped the
    # last 8 characters even when ".objlist" appeared mid-name
    if name.endswith(suffix):
        name = name[:-len(suffix)]
    return name
+
+
def find_ldd_path(bld, libname, binary):
    '''find the path to the syslib we will link against

    Runs ldd on *binary* and returns the resolved path (bytes) of the
    first dependency whose name starts with *libname*, or None.
    Results are cached in bld.env.syslib_paths; the libc path is saved
    as a side effect in bld.env.libc_path.
    '''
    ret = None
    if not bld.env.syslib_paths:
        bld.env.syslib_paths = {}
    if libname in bld.env.syslib_paths:
        return bld.env.syslib_paths[libname]

    lddpipe = subprocess.Popen(['ldd', binary], stdout=subprocess.PIPE).stdout
    for line in lddpipe:
        line = line.strip()
        cols = line.split(b" ")
        # only "name => path (addr)" lines are interesting
        if len(cols) < 3 or cols[1] != b"=>":
            continue
        if cols[0].startswith(b"libc."):
            # save this one too
            bld.env.libc_path = cols[2]
        if cols[0].startswith(libname):
            ret = cols[2]
    bld.env.syslib_paths[libname] = ret
    return ret
+
+
# some regular expressions for parsing readelf output
re_sharedlib = re.compile(rb'Shared library: \[(.*)\]')
# output from readelf could be `Library rpath` or `Library runpath`
re_rpath = re.compile(rb'Library (rpath|runpath): \[(.*)\]')
+
def get_libs(bld, binname):
    '''find the list of linked libraries for any binary or library
    binname is the path to the binary/library on disk

    We do this using readelf instead of ldd as we need to avoid recursing
    into system libraries

    Returns a set of resolved absolute paths; results are cached in
    bld.env.lib_cache.
    '''

    # see if we can get the result from the ldd cache
    if not bld.env.lib_cache:
        bld.env.lib_cache = {}
    if binname in bld.env.lib_cache:
        return bld.env.lib_cache[binname].copy()

    rpath = []
    libs = set()

    # collect NEEDED entries and rpath/runpath from the dynamic section
    elfpipe = subprocess.Popen(['readelf', '--dynamic', binname], stdout=subprocess.PIPE).stdout
    for line in elfpipe:
        m = re_sharedlib.search(line)
        if m:
            libs.add(m.group(1))
        m = re_rpath.search(line)
        if m:
            # output from Popen is always bytestr even in py3
            rpath.extend(m.group(2).split(b":"))

    ret = set()
    for lib in libs:
        found = False
        for r in rpath:
            path = os.path.join(r, lib)
            if os.path.exists(path):
                ret.add(os.path.realpath(path))
                found = True
                break
        if not found:
            # we didn't find this lib using rpath. It is probably a system
            # library, so to find the path to it we either need to use ldd
            # or we need to start parsing /etc/ld.so.conf* ourselves. We'll
            # use ldd for now, even though it is slow
            path = find_ldd_path(bld, lib, binname)
            if path:
                ret.add(os.path.realpath(path))

    bld.env.lib_cache[binname] = ret.copy()

    return ret
+
+
def get_libs_recursive(bld, binname, seen):
    '''find the recursive list of linked libraries for any binary or library
    binname is the path to the binary/library on disk. seen is a set used
    to prevent loops
    '''
    if binname in seen:
        return set()
    ret = get_libs(bld, binname)
    seen.add(binname)
    for lib in ret:
        # we don't want to recurse into system libraries. If a system
        # library that we use (eg. libcups) happens to use another library
        # (such as libkrb5) which contains common symbols with our own
        # libraries, then that is not an error
        if lib in bld.env.library_dict:
            ret = ret.union(get_libs_recursive(bld, lib, seen))
    return ret
+
+
+
def find_syslib_path(bld, libname, deps):
    '''find the path to the syslib we will link against'''
    # the strategy is to use the targets that depend on the library, and run ldd
    # on it to find the real location of the library that is used

    linkpath = deps[0].link_task.outputs[0].abspath(bld.env)

    if libname == "python":
        # append e.g. "3.x" so we probe for the versioned libpython
        libname += bld.env.PYTHON_VERSION

    return find_ldd_path(bld, "lib%s" % libname.lower(), linkpath)
+
+
def build_symbol_sets(bld, tgt_list):
    '''build the public_symbols and undefined_symbols attributes for each target

    Also populates bld.env.symbol_map (symbol -> subsystem names),
    bld.env.public_symbols and bld.env.used_symbols. No-op if the sets
    were already built.
    '''

    if bld.env.public_symbols:
        return

    objlist = []  # list of object file
    objmap = {}   # map from object filename to target (subsystem) name

    for t in tgt_list:
        t.public_symbols = set()
        t.undefined_symbols = set()
        t.used_symbols = set()
        for tsk in getattr(t, 'compiled_tasks', []):
            for output in tsk.outputs:
                objpath = output.abspath(bld.env)
                objlist.append(objpath)
                objmap[objpath] = t

    symbols = symbols_extract(bld, objlist)
    for obj in objlist:
        t = objmap[obj]
        t.public_symbols = t.public_symbols.union(symbols[obj]["PUBLIC"])
        t.undefined_symbols = t.undefined_symbols.union(symbols[obj]["UNDEFINED"])
        t.used_symbols = t.used_symbols.union(symbols[obj]["UNDEFINED"])

        # a symbol defined by another object of the same target is not undefined
        t.undefined_symbols = t.undefined_symbols.difference(t.public_symbols)

    # and the reverse map of public symbols to subsystem name
    bld.env.symbol_map = {}

    for t in tgt_list:
        for s in t.public_symbols:
            if not s in bld.env.symbol_map:
                bld.env.symbol_map[s] = []
            bld.env.symbol_map[s].append(real_name(t.sname))

    # NOTE(review): 'targets' is unused below — kept for parity with the
    # sibling functions; confirm before removing
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    bld.env.public_symbols = {}
    for t in tgt_list:
        name = real_name(t.sname)
        if name in bld.env.public_symbols:
            bld.env.public_symbols[name] = bld.env.public_symbols[name].union(t.public_symbols)
        else:
            bld.env.public_symbols[name] = t.public_symbols
        if t.samba_type in ['LIBRARY', 'PLUGIN']:
            # libraries also export the symbols of their constituent objects
            for dep in t.add_objects:
                t2 = bld.get_tgen_by_name(dep)
                bld.ASSERT(t2 is not None, "Library '%s' has unknown dependency '%s'" % (name, dep))
                bld.env.public_symbols[name] = bld.env.public_symbols[name].union(t2.public_symbols)

    bld.env.used_symbols = {}
    for t in tgt_list:
        name = real_name(t.sname)
        if name in bld.env.used_symbols:
            bld.env.used_symbols[name] = bld.env.used_symbols[name].union(t.used_symbols)
        else:
            bld.env.used_symbols[name] = t.used_symbols
        if t.samba_type in ['LIBRARY', 'PLUGIN']:
            for dep in t.add_objects:
                t2 = bld.get_tgen_by_name(dep)
                bld.ASSERT(t2 is not None, "Library '%s' has unknown dependency '%s'" % (name, dep))
                bld.env.used_symbols[name] = bld.env.used_symbols[name].union(t2.used_symbols)
+
+
def build_library_dict(bld, tgt_list):
    '''build the library_dict dictionary

    Maps the realpath of every built library/plugin/python module to
    its subsystem name. No-op if already built.
    '''

    if bld.env.library_dict:
        return

    bld.env.library_dict = {}

    for t in tgt_list:
        if t.samba_type in [ 'LIBRARY', 'PLUGIN', 'PYTHON' ]:
            linkpath = os.path.realpath(t.link_task.outputs[0].abspath(bld.env))
            bld.env.library_dict[linkpath] = t.sname
+
+
def build_syslib_sets(bld, tgt_list):
    '''build the public_symbols for all syslibs

    Fills bld.env.syslib_symbols, extends bld.env.symbol_map and
    bld.env.public_symbols with system library symbols, and saves
    bld.env.libc_symbols. No-op if already built.
    '''

    if bld.env.syslib_symbols:
        return

    # work out what syslibs we depend on, and what targets those are used in
    syslibs = {}
    objmap = {}
    for t in tgt_list:
        if getattr(t, 'uselib', []) and t.samba_type in [ 'LIBRARY', 'PLUGIN', 'BINARY', 'PYTHON' ]:
            for lib in t.uselib:
                if lib in ['PYEMBED', 'PYEXT']:
                    # both map to the python runtime library
                    lib = "python"
                if not lib in syslibs:
                    syslibs[lib] = []
                syslibs[lib].append(t)

    # work out the paths to each syslib
    syslib_paths = []
    for lib in syslibs:
        path = find_syslib_path(bld, lib, syslibs[lib])
        if path is None:
            Logs.warn("Unable to find syslib path for %s" % lib)
        if path is not None:
            syslib_paths.append(path)
            objmap[path] = lib.lower()

    # add in libc
    syslib_paths.append(bld.env.libc_path)
    objmap[bld.env.libc_path] = 'c'

    # dynamic=True as these are shared objects
    symbols = symbols_extract(bld, syslib_paths, dynamic=True)

    # keep a map of syslib names to public symbols
    bld.env.syslib_symbols = {}
    for lib in symbols:
        bld.env.syslib_symbols[lib] = symbols[lib]["PUBLIC"]

    # add to the map of symbols to dependencies
    for lib in symbols:
        for sym in symbols[lib]["PUBLIC"]:
            if not sym in bld.env.symbol_map:
                bld.env.symbol_map[sym] = []
            bld.env.symbol_map[sym].append(objmap[lib])

    # keep the libc symbols as well, as these are useful for some of the
    # sanity checks
    bld.env.libc_symbols = symbols[bld.env.libc_path]["PUBLIC"]

    # add to the combined map of dependency name to public_symbols
    for lib in bld.env.syslib_symbols:
        bld.env.public_symbols[objmap[lib]] = bld.env.syslib_symbols[lib]
+
+
def build_autodeps(bld, t):
    '''build the set of dependencies for a target

    For every symbol the target leaves undefined, look up the providers
    in bld.env.symbol_map and record a dependency on the providing
    subsystem (or its containing library). The result is stored on
    t.autodeps.
    '''
    deps = set()
    name = real_name(t.sname)

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    for sym in t.undefined_symbols:
        if sym in t.public_symbols:
            continue
        if sym in bld.env.symbol_map:
            depname = bld.env.symbol_map[sym]
            if depname == [ name ]:
                # self dependencies aren't interesting
                continue
            if t.in_library == depname:
                # no need to depend on the library we are part of
                continue
            if depname[0] in ['c', 'python']:
                # these don't go into autodeps
                continue
            if targets[depname[0]] in [ 'SYSLIB' ]:
                deps.add(depname[0])
                continue
            t2 = bld.get_tgen_by_name(depname[0])
            if len(t2.in_library) != 1:
                # provider is in zero or several libraries: depend on it directly
                deps.add(depname[0])
                continue
            if t2.in_library == t.in_library:
                # if we're part of the same library, we don't need to autodep
                continue
            deps.add(t2.in_library[0])
    t.autodeps = deps
+
+
def build_library_names(bld, tgt_list):
    '''add a in_library attribute to all targets that are part of a library

    Idempotent: guarded by bld.env.done_build_library_names.
    '''

    if bld.env.done_build_library_names:
        return

    for t in tgt_list:
        t.in_library = []

    for t in tgt_list:
        if t.samba_type in ['LIBRARY', 'PLUGIN']:
            for obj in t.samba_deps_extended:
                t2 = bld.get_tgen_by_name(obj)
                if t2 and t2.samba_type in [ 'SUBSYSTEM', 'BUILTIN', 'ASN1' ]:
                    if not t.sname in t2.in_library:
                        t2.in_library.append(t.sname)
    bld.env.done_build_library_names = True
+
+
+def check_library_deps(bld, t):
+    '''check that all the autodeps that have mutual dependency of this
+    target are in the same library as the target
+
+    Emits warnings only; the hard error is intentionally commented out.
+    '''
+
+    name = real_name(t.sname)
+
+    # a target appearing in more than one library is suspicious
+    if len(t.in_library) > 1:
+        Logs.warn("WARNING: Target '%s' in multiple libraries: %s" % (t.sname, t.in_library))
+
+    for dep in t.autodeps:
+        t2 = bld.get_tgen_by_name(dep)
+        if t2 is None:
+            continue
+        for dep2 in t2.autodeps:
+            # report mutual dependencies that cross library boundaries
+            if dep2 == name and t.in_library != t2.in_library:
+                Logs.warn("WARNING: mutual dependency %s <=> %s" % (name, real_name(t2.sname)))
+                Logs.warn("Libraries should match. %s != %s" % (t.in_library, t2.in_library))
+                # raise Errors.WafError("illegal mutual dependency")
+
+
+def check_syslib_collisions(bld, tgt_list):
+    '''check if a target has any symbol collisions with a syslib
+
+    We do not want any code in Samba to use a symbol name from a
+    system library. The chance of that causing problems is just too
+    high. Note that libreplace uses a rep_XX approach of renaming
+    symbols via macros
+
+    Raises a WafError after reporting all collisions found.
+    '''
+
+    has_error = False
+    for t in tgt_list:
+        for lib in bld.env.syslib_symbols:
+            # any overlap between our public symbols and the syslib's is an error
+            common = t.public_symbols.intersection(bld.env.syslib_symbols[lib])
+            if common:
+                Logs.error("ERROR: Target '%s' has symbols '%s' which is also in syslib '%s'" % (t.sname, common, lib))
+                has_error = True
+    if has_error:
+        raise Errors.WafError("symbols in common with system libraries")
+
+
+def check_dependencies(bld, t):
+    '''check for dependencies that should be changed
+
+    Reports declared dependencies that contribute no symbols to the
+    target, suggests dependencies for symbols that remain undefined,
+    and stores symbols with no known provider in t.unsatisfied_symbols.
+    '''
+
+    # skip targets that have a separate .objlist object target
+    if bld.get_tgen_by_name(t.sname + ".objlist"):
+        return
+
+    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+    # symbols still needing a provider: undefined minus our own public ones
+    remaining = t.undefined_symbols.copy()
+    remaining = remaining.difference(t.public_symbols)
+
+    sname = real_name(t.sname)
+
+    deps = set(t.samba_deps)
+    for d in t.samba_deps:
+        if targets[d] in [ 'EMPTY', 'DISABLED', 'SYSLIB', 'GENERATOR' ]:
+            continue
+        bld.ASSERT(d in bld.env.public_symbols, "Failed to find symbol list for dependency '%s'" % d)
+        diff = remaining.intersection(bld.env.public_symbols[d])
+        if not diff and targets[sname] != 'LIBRARY':
+            # this declared dependency supplies no needed symbols
+            Logs.info("Target '%s' has no dependency on %s" % (sname, d))
+        else:
+            remaining = remaining.difference(diff)
+
+    t.unsatisfied_symbols = set()
+    # group the still-missing symbols by the dependency that would provide them
+    needed = {}
+    for sym in remaining:
+        if sym in bld.env.symbol_map:
+            dep = bld.env.symbol_map[sym]
+            if not dep[0] in needed:
+                needed[dep[0]] = set()
+            needed[dep[0]].add(sym)
+        else:
+            t.unsatisfied_symbols.add(sym)
+
+    for dep in needed:
+        Logs.info("Target '%s' should add dep '%s' for symbols %s" % (sname, dep, " ".join(needed[dep])))
+
+
+
+def check_syslib_dependencies(bld, t):
+    '''check for syslib dependencies
+
+    Suggests system libraries that would satisfy the target's remaining
+    unsatisfied symbols, using the symbol map built from the syslibs.
+    '''
+
+    # skip targets that have a separate .objlist object target
+    if bld.get_tgen_by_name(t.sname + ".objlist"):
+        return
+
+    sname = real_name(t.sname)
+
+    remaining = set()
+
+    features = TO_LIST(t.features)
+    if 'pyembed' in features or 'pyext' in features:
+        # for python modules, symbols found in the 'python' symbol set
+        # are not counted as unsatisfied
+        if 'python' in bld.env.public_symbols:
+            t.unsatisfied_symbols = t.unsatisfied_symbols.difference(bld.env.public_symbols['python'])
+
+    needed = {}
+    for sym in t.unsatisfied_symbols:
+        if sym in bld.env.symbol_map:
+            dep = bld.env.symbol_map[sym][0]
+            if dep == 'c':
+                # symbols from libc are not reported
+                continue
+            if not dep in needed:
+                needed[dep] = set()
+            needed[dep].add(sym)
+        else:
+            remaining.add(sym)
+
+    for dep in needed:
+        Logs.info("Target '%s' should add syslib dep '%s' for symbols %s" % (sname, dep, " ".join(needed[dep])))
+
+    if remaining:
+        debug("deps: Target '%s' has unsatisfied symbols: %s" % (sname, " ".join(remaining)))
+
+
+
+def symbols_symbolcheck(task):
+    '''check the internal dependency lists
+
+    waf task rule: builds the symbol sets and library names, computes
+    autodeps for each target that has sources, then runs the dependency
+    and library checks on every target.
+    '''
+    bld = task.env.bld
+    tgt_list = get_tgt_list(bld)
+
+    build_symbol_sets(bld, tgt_list)
+    build_library_names(bld, tgt_list)
+
+    for t in tgt_list:
+        t.autodeps = set()
+        if getattr(t, 'source', ''):
+            build_autodeps(bld, t)
+
+    for t in tgt_list:
+        check_dependencies(bld, t)
+
+    for t in tgt_list:
+        check_library_deps(bld, t)
+
+def symbols_syslibcheck(task):
+    '''check the syslib dependencies
+
+    waf task rule: builds the system library symbol sets, checks for
+    symbol collisions with system libraries, then checks each target's
+    syslib dependencies.
+    '''
+    bld = task.env.bld
+    tgt_list = get_tgt_list(bld)
+
+    build_syslib_sets(bld, tgt_list)
+    check_syslib_collisions(bld, tgt_list)
+
+    for t in tgt_list:
+        check_syslib_dependencies(bld, t)
+
+
+def symbols_whyneeded(task):
+    """check why 'target' needs to link to 'subsystem'
+
+    Driven by the WHYNEEDED=TARGET:DEPENDENCY option; prints the public
+    symbols of DEPENDENCY that TARGET actually uses.
+    """
+    bld = task.env.bld
+    tgt_list = get_tgt_list(bld)
+
+    why = Options.options.WHYNEEDED.split(":")
+    if len(why) != 2:
+        raise Errors.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
+    target = why[0]
+    subsystem = why[1]
+
+    build_symbol_sets(bld, tgt_list)
+    build_library_names(bld, tgt_list)
+    build_syslib_sets(bld, tgt_list)
+
+    Logs.info("Checking why %s needs to link to %s" % (target, subsystem))
+    if not target in bld.env.used_symbols:
+        Logs.warn("unable to find target '%s' in used_symbols dict" % target)
+        return
+    if not subsystem in bld.env.public_symbols:
+        Logs.warn("unable to find subsystem '%s' in public_symbols dict" % subsystem)
+        return
+    # symbols both used by target and exported by subsystem
+    overlap = bld.env.used_symbols[target].intersection(bld.env.public_symbols[subsystem])
+    if not overlap:
+        Logs.info("target '%s' doesn't use any public symbols from '%s'" % (target, subsystem))
+    else:
+        Logs.info("target '%s' uses symbols %s from '%s'" % (target, overlap, subsystem))
+
+
+def report_duplicate(bld, binname, sym, libs, fail_on_error):
+    '''report duplicated symbols
+
+    Maps library paths back to target names via bld.env.library_dict
+    where possible, then either raises a WafError (fail_on_error=True)
+    or prints the duplicate.
+    '''
+    # linker-provided bookkeeping symbols are expected everywhere; ignore
+    if sym in ['_init', '_fini', '_edata', '_end', '__bss_start']:
+        return
+    libnames = []
+    for lib in libs:
+        if lib in bld.env.library_dict:
+            libnames.append(bld.env.library_dict[lib])
+        else:
+            libnames.append(lib)
+    if fail_on_error:
+        raise Errors.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
+    else:
+        print("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
+
+
+def symbols_dupcheck_binary(bld, binname, fail_on_error):
+    '''check for duplicated symbols in one binary
+
+    Builds a map of public symbol -> set of defining libraries over all
+    libraries linked (recursively) into the binary and reports every
+    symbol defined by more than one of them.
+    '''
+
+    libs = get_libs_recursive(bld, binname, set())
+    symlist = symbols_extract(bld, libs, dynamic=True)
+
+    symmap = {}
+    for libpath in symlist:
+        for sym in symlist[libpath]['PUBLIC']:
+            if sym == '_GLOBAL_OFFSET_TABLE_':
+                continue
+            if not sym in symmap:
+                symmap[sym] = set()
+            symmap[sym].add(libpath)
+    for sym in symmap:
+        if len(symmap[sym]) > 1:
+            # only report if at least one offender is one of our own
+            # libraries (i.e. appears in library_dict)
+            for libpath in symmap[sym]:
+                if libpath in bld.env.library_dict:
+                    report_duplicate(bld, binname, sym, symmap[sym], fail_on_error)
+                    break
+
+def symbols_dupcheck(task, fail_on_error=False):
+ '''check for symbols defined in two different subsystems'''
+ bld = task.env.bld
+ tgt_list = get_tgt_list(bld)
+
+ targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
+
+ build_library_dict(bld, tgt_list)
+ for t in tgt_list:
+ if t.samba_type == 'BINARY':
+ binname = os.path.relpath(t.link_task.outputs[0].abspath(bld.env), os.getcwd())
+ symbols_dupcheck_binary(bld, binname, fail_on_error)
+
+
+def symbols_dupcheck_fatal(task):
+    '''check for symbols defined in two different subsystems (and fail if duplicates are found)'''
+    # same as symbols_dupcheck, but duplicates raise instead of printing
+    symbols_dupcheck(task, fail_on_error=True)
+
+
+def SYMBOL_CHECK(bld):
+    '''check our dependency lists
+
+    Registers the symbol, syslib and duplicate checking tasks when the
+    SYMBOLCHECK option is set, and the why-needed task when WHYNEEDED
+    is given. Each task gets a reference to the build context via
+    task.env.bld so the rule functions can reach it.
+    '''
+    if Options.options.SYMBOLCHECK:
+        bld.SET_BUILD_GROUP('symbolcheck')
+        task = bld(rule=symbols_symbolcheck, always=True, name='symbol checking')
+        task.env.bld = bld
+
+        bld.SET_BUILD_GROUP('syslibcheck')
+        task = bld(rule=symbols_syslibcheck, always=True, name='syslib checking')
+        task.env.bld = bld
+
+        bld.SET_BUILD_GROUP('syslibcheck')
+        task = bld(rule=symbols_dupcheck, always=True, name='symbol duplicate checking')
+        task.env.bld = bld
+
+    if Options.options.WHYNEEDED:
+        bld.SET_BUILD_GROUP('syslibcheck')
+        task = bld(rule=symbols_whyneeded, always=True, name='check why a dependency is needed')
+        task.env.bld = bld
+
+
+Build.BuildContext.SYMBOL_CHECK = SYMBOL_CHECK
+
+def DUP_SYMBOL_CHECK(bld):
+ if Options.options.DUP_SYMBOLCHECK and bld.env.DEVELOPER:
+ '''check for duplicate symbols'''
+ bld.SET_BUILD_GROUP('syslibcheck')
+ task = bld(rule=symbols_dupcheck_fatal, always=True, name='symbol duplicate checking')
+ task.env.bld = bld
+
+Build.BuildContext.DUP_SYMBOL_CHECK = DUP_SYMBOL_CHECK
diff --git a/buildtools/wafsamba/test_duplicate_symbol.sh b/buildtools/wafsamba/test_duplicate_symbol.sh
new file mode 100755
index 0000000..dffac75
--- /dev/null
+++ b/buildtools/wafsamba/test_duplicate_symbol.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+# Run the waf duplicate symbol check, wrapped in subunit.
+
+. testprogs/blackbox/subunit.sh
+
+subunit_start_test duplicate_symbols
+
+# disable Python hash randomization so the check is reproducible
+PYTHONHASHSEED=1
+export PYTHONHASHSEED
+
+if $PYTHON ./buildtools/bin/waf build --dup-symbol-check; then
+    subunit_pass_test duplicate_symbols
+else
+    # subunit_fail_test reads the failure reason from stdin
+    echo | subunit_fail_test duplicate_symbols
+fi
diff --git a/buildtools/wafsamba/tests/__init__.py b/buildtools/wafsamba/tests/__init__.py
new file mode 100644
index 0000000..ae27418
--- /dev/null
+++ b/buildtools/wafsamba/tests/__init__.py
@@ -0,0 +1,35 @@
+# Copyright (C) 2012 Jelmer Vernooij <jelmer@samba.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation; either version 2.1 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+"""Tests for wafsamba."""
+
+from unittest import (
+ TestCase,
+ TestLoader,
+ )
+
+def test_suite():
+ names = [
+ 'abi',
+ 'bundled',
+ 'utils',
+ ]
+ module_names = ['wafsamba.tests.test_' + name for name in names]
+ loader = TestLoader()
+ result = loader.suiteClass()
+ suite = loader.loadTestsFromNames(module_names)
+ result.addTests(suite)
+ return result
diff --git a/buildtools/wafsamba/tests/test_abi.py b/buildtools/wafsamba/tests/test_abi.py
new file mode 100644
index 0000000..ffd5a56
--- /dev/null
+++ b/buildtools/wafsamba/tests/test_abi.py
@@ -0,0 +1,134 @@
+# Copyright (C) 2012 Jelmer Vernooij <jelmer@samba.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation; either version 2.1 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+from wafsamba.tests import TestCase
+
+from wafsamba.samba_abi import (
+ abi_write_vscript,
+ normalise_signature,
+ )
+
+from io import StringIO
+
+
+class NormaliseSignatureTests(TestCase):
+    """Tests for samba_abi.normalise_signature (gdb output -> canonical form)."""
+
+    def test_function_simple(self):
+        # the "$N = {...} 0x... <name>" gdb wrapper is stripped
+        self.assertEqual("int (const struct GUID *, const struct GUID *)",
+            normalise_signature("$2 = {int (const struct GUID *, const struct GUID *)} 0xe871 <GUID_compare>"))
+
+    def test_maps_Bool(self):
+        # Some types have different internal names
+        self.assertEqual("bool (const struct GUID *)",
+            normalise_signature("$1 = {_Bool (const struct GUID *)} 0xe75b <GUID_all_zero>"))
+
+    def test_function_keep(self):
+        # an already-normalised signature is returned unchanged
+        self.assertEqual(
+            "enum ndr_err_code (struct ndr_push *, int, const union winreg_Data *)",
+            normalise_signature("enum ndr_err_code (struct ndr_push *, int, const union winreg_Data *)"))
+
+    def test_struct_constant(self):
+        self.assertEqual(
+            'uuid = {time_low = 0, time_mid = 0, time_hi_and_version = 0, clock_seq = "\\000", node = "\\000\\000\\000\\000\\000"}, if_version = 0',
+            normalise_signature('$239 = {uuid = {time_low = 0, time_mid = 0, time_hi_and_version = 0, clock_seq = "\\000", node = "\\000\\000\\000\\000\\000"}, if_version = 0}'))
+
+    def test_incomplete_sequence(self):
+        # Newer versions of gdb insert these incomplete sequence elements
+        self.assertEqual(
+            'uuid = {time_low = 2324192516, time_mid = 7403, time_hi_and_version = 4553, clock_seq = "\\237\\350", node = "\\b\\000+\\020H`"}, if_version = 2',
+            normalise_signature('$244 = {uuid = {time_low = 2324192516, time_mid = 7403, time_hi_and_version = 4553, clock_seq = "\\237", <incomplete sequence \\350>, node = "\\b\\000+\\020H`"}, if_version = 2}'))
+        self.assertEqual(
+            'uuid = {time_low = 2324192516, time_mid = 7403, time_hi_and_version = 4553, clock_seq = "\\237\\350", node = "\\b\\000+\\020H`"}, if_version = 2',
+            normalise_signature('$244 = {uuid = {time_low = 2324192516, time_mid = 7403, time_hi_and_version = 4553, clock_seq = "\\237\\350", node = "\\b\\000+\\020H`"}, if_version = 2}'))
+
+
+class WriteVscriptTests(TestCase):
+    """Tests for samba_abi.abi_write_vscript (ld version script generation)."""
+
+    def test_one(self):
+        f = StringIO()
+        abi_write_vscript(f, "MYLIB", "1.0", [], {
+            "old": "1.0",
+            "new": "1.0"}, ["*"])
+        self.assertEqual(f.getvalue(), """\
+1.0 {
+\tglobal:
+\t\t*;
+\tlocal:
+\t\t_end;
+\t\t__bss_start;
+\t\t_edata;
+};
+""")
+
+    def test_simple(self):
+        # No restrictions.
+        f = StringIO()
+        abi_write_vscript(f, "MYLIB", "1.0", ["0.1"], {
+            "old": "0.1",
+            "new": "1.0"}, ["*"])
+        self.assertEqual(f.getvalue(), """\
+MYLIB_0.1 {
+\tglobal:
+\t\told;
+};
+
+1.0 {
+\tglobal:
+\t\t*;
+\tlocal:
+\t\t_end;
+\t\t__bss_start;
+\t\t_edata;
+};
+""")
+
+    def test_exclude(self):
+        # a "!pattern" entry makes matching symbols local
+        f = StringIO()
+        abi_write_vscript(f, "MYLIB", "1.0", [], {
+            "exc_old": "0.1",
+            "old": "0.1",
+            "new": "1.0"}, ["!exc_*"])
+        self.assertEqual(f.getvalue(), """\
+1.0 {
+\tglobal:
+\t\t*;
+\tlocal:
+\t\texc_*;
+\t\t_end;
+\t\t__bss_start;
+\t\t_edata;
+};
+""")
+
+    def test_excludes_and_includes(self):
+        # mixing include and exclude patterns; everything else goes local
+        f = StringIO()
+        abi_write_vscript(f, "MYLIB", "1.0", [], {
+            "pub_foo": "1.0",
+            "exc_bar": "1.0",
+            "other": "1.0"
+            }, ["pub_*", "!exc_*"])
+        self.assertEqual(f.getvalue(), """\
+1.0 {
+\tglobal:
+\t\tpub_*;
+\tlocal:
+\t\texc_*;
+\t\t_end;
+\t\t__bss_start;
+\t\t_edata;
+\t\t*;
+};
+""")
diff --git a/buildtools/wafsamba/tests/test_bundled.py b/buildtools/wafsamba/tests/test_bundled.py
new file mode 100644
index 0000000..a8e9821
--- /dev/null
+++ b/buildtools/wafsamba/tests/test_bundled.py
@@ -0,0 +1,27 @@
+# Copyright (C) 2012 Jelmer Vernooij <jelmer@samba.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation; either version 2.1 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+from wafsamba.tests import TestCase
+
+from wafsamba.samba_bundled import (
+ tuplize_version,
+ )
+
+
+class TuplizeVersionTests(TestCase):
+    """Tests for samba_bundled.tuplize_version."""
+
+    def test_simple(self):
+        # a dotted version string becomes a tuple of ints
+        self.assertEqual((1, 2, 10), tuplize_version("1.2.10"))
diff --git a/buildtools/wafsamba/tests/test_utils.py b/buildtools/wafsamba/tests/test_utils.py
new file mode 100644
index 0000000..77fc55c
--- /dev/null
+++ b/buildtools/wafsamba/tests/test_utils.py
@@ -0,0 +1,76 @@
+# Copyright (C) 2012 Jelmer Vernooij <jelmer@samba.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation; either version 2.1 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+from wafsamba.tests import TestCase
+
+from wafsamba.samba_utils import (
+ TO_LIST,
+ dict_concat,
+ subst_vars_error,
+ unique_list,
+ )
+
+class ToListTests(TestCase):
+    """Tests for samba_utils.TO_LIST string/list normalisation."""
+
+    def test_none(self):
+        self.assertEqual([], TO_LIST(None))
+
+    def test_already_list(self):
+        # an existing list is passed through unchanged
+        self.assertEqual(["foo", "bar", 1], TO_LIST(["foo", "bar", 1]))
+
+    def test_default_delimiter(self):
+        # default split is on whitespace; quoted substrings stay intact
+        self.assertEqual(["foo", "bar"], TO_LIST("foo bar"))
+        self.assertEqual(["foo", "bar"], TO_LIST(" foo bar "))
+        self.assertEqual(["foo ", "bar"], TO_LIST(" \"foo \" bar "))
+
+    def test_delimiter(self):
+        # explicit delimiter: surrounding whitespace is preserved
+        self.assertEqual(["foo", "bar"], TO_LIST("foo,bar", ","))
+        self.assertEqual([" foo", "bar "], TO_LIST(" foo,bar ", ","))
+        self.assertEqual([" \" foo\"", " bar "], TO_LIST(" \" foo\", bar ", ","))
+
+
+class UniqueListTests(TestCase):
+    """Tests for samba_utils.unique_list."""
+
+    def test_unique_list(self):
+        # duplicates removed, first-seen order preserved
+        self.assertEqual(["foo", "bar"], unique_list(["foo", "bar", "foo"]))
+
+
+class SubstVarsErrorTests(TestCase):
+    """Tests for samba_utils.subst_vars_error ${VAR} substitution."""
+
+    def test_valid(self):
+        self.assertEqual("", subst_vars_error("", {}))
+        self.assertEqual("FOO bar", subst_vars_error("${F} bar", {"F": "FOO"}))
+
+    def test_invalid(self):
+        # an undefined variable raises KeyError
+        self.assertRaises(KeyError, subst_vars_error, "${F}", {})
+
+
+class DictConcatTests(TestCase):
+    """Tests for samba_utils.dict_concat (merge without overwriting)."""
+
+    def test_empty(self):
+        ret = {}
+        dict_concat(ret, {})
+        self.assertEqual({}, ret)
+
+    def test_same(self):
+        # existing keys are not overwritten
+        ret = {"foo": "bar"}
+        dict_concat(ret, {"foo": "bla"})
+        self.assertEqual({"foo": "bar"}, ret)
+
+    def test_simple(self):
+        # new keys are added
+        ret = {"foo": "bar"}
+        dict_concat(ret, {"blie": "bla"})
+        self.assertEqual({"foo": "bar", "blie": "bla"}, ret)
diff --git a/buildtools/wafsamba/wafsamba.py b/buildtools/wafsamba/wafsamba.py
new file mode 100644
index 0000000..e1c2877
--- /dev/null
+++ b/buildtools/wafsamba/wafsamba.py
@@ -0,0 +1,1227 @@
+# a waf tool to add autoconf-like macros to the configure section
+# and for SAMBA_ macros for building libraries, binaries etc
+
+import os, sys, re, shutil, fnmatch
+from waflib import Build, Options, Task, Utils, TaskGen, Logs, Context, Errors
+from waflib.Configure import conf
+from waflib.Logs import debug
+from samba_utils import SUBST_VARS_RECURSIVE
+TaskGen.task_gen.apply_verif = Utils.nada
+
+# bring in the other samba modules
+from samba_utils import *
+from samba_utils import symlink
+from samba_version import *
+from samba_autoconf import *
+from samba_patterns import *
+from samba_pidl import *
+from samba_autoproto import *
+from samba_python import *
+from samba_perl import *
+from samba_deps import *
+from samba_bundled import *
+from samba_third_party import *
+import samba_cross
+import samba_install
+import samba_conftests
+import samba_abi
+import samba_headers
+import generic_cc
+import samba_dist
+import samba_wildcard
+import symbols
+import pkgconfig
+import configure_file
+import samba_waf18
+
+# directory (under the build dir) where shared libraries are collected
+LIB_PATH="shared"
+
+# make python output unbuffered so build messages appear immediately
+os.environ['PYTHONUNBUFFERED'] = '1'
+
+# refuse to run with any waf other than the single bundled version
+if Context.HEXVERSION not in (0x2001a00,):
+    Logs.error('''
+Please use the version of waf that comes with Samba, not
+a system installed version. See http://wiki.samba.org/index.php/Waf
+for details.
+
+Alternatively, please run ./configure and make as usual. That will
+call the right version of waf.''')
+    sys.exit(1)
+
+@conf
+def SAMBA_BUILD_ENV(conf):
+    '''create the samba build environment
+
+    Creates the shared/modules/plugins/python output directories, the
+    'default/' compatibility symlinks, the pidl blib build directories,
+    and removes any stale pidl/blib left behind in the source tree.
+    '''
+    conf.env.BUILD_DIRECTORY = conf.bldnode.abspath()
+    mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH))
+    mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH, "private"))
+    mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, "modules"))
+    mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, "plugins"))
+    mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'python/samba/dcerpc'))
+    # this allows all of the bin/shared and bin/python targets
+    # to be expressed in terms of build directory paths
+    mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'default'))
+    for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('plugins', 'plugins'), ('python', 'python')]:
+        link_target = os.path.join(conf.env.BUILD_DIRECTORY, 'default/' + target)
+        if not os.path.lexists(link_target):
+            symlink('../' + source, link_target)
+
+    # get perl to put the blib files in the build directory
+    blib_bld = os.path.join(conf.env.BUILD_DIRECTORY, 'default/pidl/blib')
+    blib_src = os.path.join(conf.srcnode.abspath(), 'pidl/blib')
+    mkdir_p(blib_bld + '/man1')
+    mkdir_p(blib_bld + '/man3')
+    if os.path.islink(blib_src):
+        os.unlink(blib_src)
+    elif os.path.exists(blib_src):
+        shutil.rmtree(blib_src)
+
+
+def ADD_INIT_FUNCTION(bld, subsystem, target, init_function):
+    '''add an init_function to the list for a subsystem
+
+    Entries are stored in the INIT_FUNCTIONS local cache, keyed by
+    subsystem, as dicts with TARGET and INIT_FUNCTION keys. A None
+    init_function is silently ignored.
+    '''
+    if init_function is None:
+        return
+    bld.ASSERT(subsystem is not None, "You must specify a subsystem for init_function '%s'" % init_function)
+    cache = LOCAL_CACHE(bld, 'INIT_FUNCTIONS')
+    if subsystem not in cache:
+        cache[subsystem] = []
+    cache[subsystem].append( { 'TARGET':target, 'INIT_FUNCTION':init_function } )
+Build.BuildContext.ADD_INIT_FUNCTION = ADD_INIT_FUNCTION
+
+
+def generate_empty_file(task):
+    '''waf task rule: write an empty file to the task's first output'''
+    task.outputs[0].write('')
+    return 0
+
+#################################################################
+def SAMBA_LIBRARY(bld, libname, source,
+                  deps='',
+                  public_deps='',
+                  includes='',
+                  public_headers=None,
+                  public_headers_install=True,
+                  private_headers=None,
+                  header_path=None,
+                  pc_files=None,
+                  vnum=None,
+                  soname=None,
+                  cflags='',
+                  cflags_end=None,
+                  ldflags='',
+                  external_library=False,
+                  realname=None,
+                  autoproto=None,
+                  autoproto_extra_source='',
+                  group='main',
+                  depends_on='',
+                  local_include=True,
+                  global_include=True,
+                  vars=None,
+                  subdir=None,
+                  install_path=None,
+                  install=True,
+                  pyembed=False,
+                  pyext=False,
+                  target_type='LIBRARY',
+                  bundled_name=None,
+                  link_name=None,
+                  abi_directory=None,
+                  abi_match=None,
+                  orig_vscript_map=None,
+                  hide_symbols=False,
+                  manpages=None,
+                  private_library=False,
+                  grouping_library=False,
+                  require_builtin_deps=False,
+                  provide_builtin_linking=False,
+                  builtin_cflags='',
+                  force_unversioned=False,
+                  allow_undefined_symbols=False,
+                  allow_warnings=False,
+                  enabled=True):
+    '''define a Samba library
+
+    Creates an .objlist subsystem holding the object files plus the
+    shared-library target itself, and handles private/builtin/unversioned
+    variants, version scripts, pkg-config files and manpages.
+    '''
+
+    # We support:
+    # - LIBRARY: this can be used to link via -llibname
+    # - MODULE: this is module from SAMBA_MODULE()
+    # - PLUGIN: this is plugin for external consumers to be
+    #           loaded via dlopen()
+    # - PYTHON: a python C binding library
+    #
+    if target_type not in ['LIBRARY', 'MODULE', 'PLUGIN', 'PYTHON']:
+        raise Errors.WafError("target_type[%s] not supported in SAMBA_LIBRARY('%s')" %
+                              (target_type, libname))
+
+    if require_builtin_deps:
+        # For now we only support require_builtin_deps only for libraries, plugins
+        if target_type not in ['LIBRARY', 'PLUGIN']:
+            raise Errors.WafError("target_type[%s] not supported SAMBA_LIBRARY('%s', require_builtin_deps=True)" %
+                                  (target_type, libname))
+
+    # option sanity checks: reject inconsistent combinations up front
+    if private_library and public_headers:
+        raise Errors.WafError("private library '%s' must not have public header files" %
+                              libname)
+
+    if orig_vscript_map and not private_library:
+        raise Errors.WafError("public library '%s' must not have orig_vscript_map" %
+                              libname)
+
+    if orig_vscript_map and abi_directory:
+        raise Errors.WafError("private library '%s' with orig_vscript_map must not have abi_directory" %
+                              libname)
+    if orig_vscript_map and abi_match:
+        raise Errors.WafError("private library '%s' with orig_vscript_map must not have abi_match" %
+                              libname)
+
+    if force_unversioned and private_library:
+        raise Errors.WafError("private library '%s': can't have force_unversioned=True" %
+                              libname)
+
+    if force_unversioned and realname is None:
+        raise Errors.WafError("library '%s': force_unversioned=True needs realname too" %
+                              libname)
+
+    if LIB_MUST_BE_PRIVATE(bld, libname) and target_type not in ['PLUGIN']:
+        private_library = True
+
+    if force_unversioned:
+        private_library = False
+
+    if not enabled:
+        SET_TARGET_TYPE(bld, libname, 'DISABLED')
+        return
+
+    source = bld.EXPAND_VARIABLES(source, vars=vars)
+    if subdir:
+        source = bld.SUBDIR(subdir, source)
+
+    # remember empty libraries, so we can strip the dependencies
+    if ((source == '') or (source == [])):
+        if deps == '' and public_deps == '':
+            SET_TARGET_TYPE(bld, libname, 'EMPTY')
+            return
+        # no sources but has deps: generate a placeholder empty C file
+        empty_c = libname + '.empty.c'
+        bld.SAMBA_GENERATOR('%s_empty_c' % libname,
+                            rule=generate_empty_file,
+                            target=empty_c)
+        source=empty_c
+
+    samba_deps = deps + ' ' + public_deps
+    samba_deps = TO_LIST(samba_deps)
+
+    # decide which intermediate targets to create for builtin vs normal libraries
+    if BUILTIN_LIBRARY(bld, libname):
+        builtin_target = libname + '.builtin.objlist'
+        builtin_cflags_end = '-D_PUBLIC_=_PRIVATE_'
+        empty_target = libname
+        obj_target = None
+    else:
+        if provide_builtin_linking:
+            builtin_target = libname + '.builtin.objlist'
+            builtin_cflags_end = '-D_PUBLIC_=_PRIVATE_'
+        else:
+            builtin_target = None
+        empty_target = None
+        obj_target = libname + '.objlist'
+        if require_builtin_deps:
+            # hide the builtin deps from the callers
+            samba_deps = TO_LIST('')
+        dep_target = obj_target
+
+    if group == 'libraries':
+        subsystem_group = 'main'
+    else:
+        subsystem_group = group
+
+    # first create a target for building the object files for this library
+    # by separating in this way, we avoid recompiling the C files
+    # separately for the install library and the build library
+    if builtin_target:
+        __t = __SAMBA_SUBSYSTEM_BUILTIN(bld, builtin_target, source,
+                                        deps=deps,
+                                        public_deps=public_deps,
+                                        includes=includes,
+                                        header_path=header_path,
+                                        builtin_cflags=builtin_cflags,
+                                        builtin_cflags_end=builtin_cflags_end,
+                                        group=group,
+                                        depends_on=depends_on,
+                                        local_include=local_include,
+                                        global_include=global_include,
+                                        allow_warnings=allow_warnings)
+        builtin_subsystem = __t
+    else:
+        builtin_subsystem = None
+    if obj_target:
+        bld.SAMBA_SUBSYSTEM(obj_target,
+                            source = source,
+                            deps = deps,
+                            public_deps = public_deps,
+                            includes = includes,
+                            public_headers = public_headers,
+                            public_headers_install = public_headers_install,
+                            private_headers= private_headers,
+                            header_path = header_path,
+                            cflags = cflags,
+                            cflags_end = cflags_end,
+                            group = subsystem_group,
+                            autoproto = autoproto,
+                            autoproto_extra_source=autoproto_extra_source,
+                            depends_on = depends_on,
+                            hide_symbols = hide_symbols,
+                            allow_warnings = allow_warnings,
+                            pyembed = pyembed,
+                            pyext = pyext,
+                            local_include = local_include,
+                            __require_builtin_deps=require_builtin_deps,
+                            global_include = global_include)
+    else:
+        et = bld.SAMBA_SUBSYSTEM(empty_target,
+                                 source=[],
+                                 __force_empty=True,
+                                 __require_builtin_deps=True)
+        et.samba_builtin_subsystem = builtin_subsystem
+
+    if BUILTIN_LIBRARY(bld, libname):
+        # builtin libraries only need the objlist target created above
+        return
+
+    if not SET_TARGET_TYPE(bld, libname, target_type):
+        return
+
+    # the library itself will depend on that object target
+    samba_deps.append(dep_target)
+
+    realname = bld.map_shlib_extension(realname, python=(target_type=='PYTHON'))
+    link_name = bld.map_shlib_extension(link_name, python=(target_type=='PYTHON'))
+
+    # we don't want any public libraries without version numbers
+    if (not private_library and target_type != 'PYTHON' and not realname):
+        if vnum is None and soname is None:
+            raise Errors.WafError("public library '%s' must have a vnum" %
+                                  libname)
+        if pc_files is None:
+            raise Errors.WafError("public library '%s' must have pkg-config file" %
+                                  libname)
+        if public_headers is None:
+            raise Errors.WafError("public library '%s' must have header files" %
+                                  libname)
+
+    # keep the original vnum for ABI checking even if vnum is cleared below
+    abi_vnum = vnum
+
+    if bundled_name is not None:
+        pass
+    elif target_type == 'PYTHON' or realname or not private_library:
+        bundled_name = libname.replace('_', '-')
+    else:
+        assert (private_library is True and realname is None)
+        bundled_name = PRIVATE_NAME(bld, libname.replace('_', '-'))
+        vnum = None
+
+    ldflags = TO_LIST(ldflags)
+    if bld.env['ENABLE_RELRO'] is True:
+        ldflags.extend(TO_LIST('-Wl,-z,relro,-z,now'))
+
+    features = 'c cshlib symlink_lib install_lib'
+    if pyext:
+        features += ' pyext'
+    if pyembed:
+        features += ' pyembed'
+
+    if abi_directory:
+        features += ' abi_check'
+
+    if pyembed and bld.env['PYTHON_SO_ABI_FLAG']:
+        # For ABI checking, we don't care about the Python version.
+        # Remove the Python ABI tag (e.g. ".cpython-35m")
+        abi_flag = bld.env['PYTHON_SO_ABI_FLAG']
+        replacement = ''
+        version_libname = libname.replace(abi_flag, replacement)
+    else:
+        version_libname = libname
+
+    # set up the ld version script, if the toolchain supports one
+    vscript = None
+    if bld.env.HAVE_LD_VERSION_SCRIPT:
+        if force_unversioned:
+            version = None
+        elif private_library:
+            version = bld.env.PRIVATE_VERSION
+        elif vnum:
+            version = "%s_%s" % (libname, vnum)
+        else:
+            version = None
+        if version:
+            vscript = "%s.vscript" % libname
+            if orig_vscript_map:
+                bld.VSCRIPT_MAP_PRIVATE(version_libname, orig_vscript_map, version, vscript)
+            else:
+                bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript,
+                                abi_match, private_library)
+            fullname = apply_pattern(bundled_name, bld.env.cshlib_PATTERN)
+            fullpath = bld.path.find_or_declare(fullname)
+            vscriptpath = bld.path.find_or_declare(vscript)
+            if not fullpath:
+                raise Errors.WafError("unable to find fullpath for %s" % fullname)
+            if not vscriptpath:
+                raise Errors.WafError("unable to find vscript path for %s" % vscript)
+            bld.add_manual_dependency(fullpath, vscriptpath)
+            if bld.is_install:
+                # also make the .inst file depend on the vscript
+                instname = apply_pattern(bundled_name + '.inst', bld.env.cshlib_PATTERN)
+                bld.add_manual_dependency(bld.path.find_or_declare(instname), bld.path.find_or_declare(vscript))
+            vscript = os.path.join(bld.path.abspath(bld.env), vscript)
+
+    # finally create the shared-library task generator itself
+    bld.SET_BUILD_GROUP(group)
+    t = bld(
+        features        = features,
+        source          = [],
+        target          = bundled_name,
+        depends_on      = depends_on,
+        samba_ldflags   = ldflags,
+        samba_deps      = samba_deps,
+        samba_includes  = includes,
+        version_script  = vscript,
+        version_libname = version_libname,
+        local_include   = local_include,
+        global_include  = global_include,
+        vnum            = vnum,
+        soname          = soname,
+        install_path    = None,
+        samba_inst_path = install_path,
+        name            = libname,
+        samba_realname  = realname,
+        samba_install   = install,
+        abi_directory   = "%s/%s" % (bld.path.abspath(), abi_directory),
+        abi_match       = abi_match,
+        abi_vnum        = abi_vnum,
+        private_library = private_library,
+        grouping_library=grouping_library,
+        allow_undefined_symbols=allow_undefined_symbols,
+        samba_require_builtin_deps=False,
+        samba_builtin_subsystem=builtin_subsystem,
+        )
+
+    if realname and not link_name:
+        link_name = 'shared/%s' % realname
+
+    if link_name:
+        if 'waflib.extras.compat15' in sys.modules:
+            link_name = 'default/' + link_name
+        t.link_name = link_name
+
+    if pc_files is not None and not private_library:
+        if pyembed:
+            bld.PKG_CONFIG_FILES(pc_files, vnum=vnum, extra_name=bld.env['PYTHON_SO_ABI_FLAG'])
+        else:
+            bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)
+
+    if (manpages is not None and 'XSLTPROC_MANPAGES' in bld.env and
+        bld.env['XSLTPROC_MANPAGES']):
+        bld.MANPAGES(manpages, install)
+
+
+Build.BuildContext.SAMBA_LIBRARY = SAMBA_LIBRARY
+
+
+#################################################################
+def SAMBA_BINARY(bld, binname, source,
+ deps='',
+ includes='',
+ public_headers=None,
+ private_headers=None,
+ header_path=None,
+ modules=None,
+ ldflags=None,
+ cflags='',
+ cflags_end=None,
+ autoproto=None,
+ use_hostcc=False,
+ use_global_deps=True,
+ compiler=None,
+ group='main',
+ manpages=None,
+ local_include=True,
+ global_include=True,
+ subsystem_name=None,
+ allow_warnings=False,
+ pyembed=False,
+ vars=None,
+ subdir=None,
+ install=True,
+ install_path=None,
+ enabled=True,
+ fuzzer=False,
+ for_selftest=False):
+ '''define a Samba binary'''
+
+ if for_selftest:
+ install=False
+ if not bld.CONFIG_GET('ENABLE_SELFTEST'):
+ enabled=False
+
+ if not enabled:
+ SET_TARGET_TYPE(bld, binname, 'DISABLED')
+ return
+
+ # Fuzzing builds do not build normal binaries
+ # however we must build asn1compile etc
+
+ if not use_hostcc and bld.env.enable_fuzzing != fuzzer:
+ SET_TARGET_TYPE(bld, binname, 'DISABLED')
+ return
+
+ if fuzzer:
+ install = False
+ if ldflags is None:
+ ldflags = bld.env['FUZZ_TARGET_LDFLAGS']
+
+ if not SET_TARGET_TYPE(bld, binname, 'BINARY'):
+ return
+
+ features = 'c cprogram symlink_bin install_bin'
+ if pyembed:
+ features += ' pyembed'
+
+ obj_target = binname + '.objlist'
+
+ source = bld.EXPAND_VARIABLES(source, vars=vars)
+ if subdir:
+ source = bld.SUBDIR(subdir, source)
+ source = unique_list(TO_LIST(source))
+
+ if group == 'binaries':
+ subsystem_group = 'main'
+ elif group == 'build_compilers':
+ subsystem_group = 'compiler_libraries'
+ else:
+ subsystem_group = group
+
+ # only specify PIE flags for binaries
+ pie_cflags = TO_LIST(cflags)
+ pie_ldflags = TO_LIST(ldflags)
+ if bld.env['ENABLE_PIE'] is True:
+ pie_cflags.extend(TO_LIST('-fPIE'))
+ pie_ldflags.extend(TO_LIST('-pie'))
+ if bld.env['ENABLE_RELRO'] is True:
+ pie_ldflags.extend(TO_LIST('-Wl,-z,relro,-z,now'))
+
+ # first create a target for building the object files for this binary
+ # by separating in this way, we avoid recompiling the C files
+ # separately for the install binary and the build binary
+ bld.SAMBA_SUBSYSTEM(obj_target,
+ source = source,
+ deps = deps,
+ includes = includes,
+ cflags = pie_cflags,
+ cflags_end = cflags_end,
+ group = subsystem_group,
+ autoproto = autoproto,
+ subsystem_name = subsystem_name,
+ local_include = local_include,
+ global_include = global_include,
+ use_hostcc = use_hostcc,
+ pyext = pyembed,
+ allow_warnings = allow_warnings,
+ use_global_deps= use_global_deps)
+
+ bld.SET_BUILD_GROUP(group)
+
+ # the binary itself will depend on that object target
+ deps = TO_LIST(deps)
+ deps.append(obj_target)
+
+ t = bld(
+ features = features,
+ source = [],
+ target = binname,
+ samba_deps = deps,
+ samba_includes = includes,
+ local_include = local_include,
+ global_include = global_include,
+ samba_modules = modules,
+ top = True,
+ samba_subsystem= subsystem_name,
+ install_path = None,
+ samba_inst_path= install_path,
+ samba_install = install,
+ samba_ldflags = pie_ldflags
+ )
+
+ if manpages is not None and 'XSLTPROC_MANPAGES' in bld.env and bld.env['XSLTPROC_MANPAGES']:
+ bld.MANPAGES(manpages, install)
+
+Build.BuildContext.SAMBA_BINARY = SAMBA_BINARY
+
+
+#################################################################
+def SAMBA_MODULE(bld, modname, source,
+                 deps='',
+                 includes='',
+                 subsystem=None,
+                 init_function=None,
+                 module_init_name='samba_init_module',
+                 autoproto=None,
+                 autoproto_extra_source='',
+                 cflags='',
+                 cflags_end=None,
+                 internal_module=True,
+                 local_include=True,
+                 global_include=True,
+                 vars=None,
+                 subdir=None,
+                 enabled=True,
+                 pyembed=False,
+                 manpages=None,
+                 allow_undefined_symbols=False,
+                 allow_warnings=False,
+                 install=True
+                 ):
+    '''define a Samba module.
+
+    Internal (or builtin) modules become a SAMBA_SUBSYSTEM linked into the
+    owning subsystem with their init function registered; external modules
+    become a private SAMBA_LIBRARY installed under ${MODULESDIR}/<subsystem>.
+    '''
+
+    bld.ASSERT(subsystem, "You must specify a subsystem for SAMBA_MODULE(%s)" % modname)
+
+    source = bld.EXPAND_VARIABLES(source, vars=vars)
+    if subdir:
+        source = bld.SUBDIR(subdir, source)
+
+    if internal_module or BUILTIN_LIBRARY(bld, modname):
+        # Do not create modules for disabled subsystems
+        if GET_TARGET_TYPE(bld, subsystem) == 'DISABLED':
+            return
+        bld.SAMBA_SUBSYSTEM(modname, source,
+                    deps=deps,
+                    includes=includes,
+                    autoproto=autoproto,
+                    autoproto_extra_source=autoproto_extra_source,
+                    cflags=cflags,
+                    cflags_end=cflags_end,
+                    local_include=local_include,
+                    global_include=global_include,
+                    allow_warnings=allow_warnings,
+                    enabled=enabled)
+
+        # register the module's init function with the subsystem so
+        # it is invoked at startup
+        bld.ADD_INIT_FUNCTION(subsystem, modname, init_function)
+        return
+
+    if not enabled:
+        SET_TARGET_TYPE(bld, modname, 'DISABLED')
+        return
+
+    # Do not create modules for disabled subsystems
+    if GET_TARGET_TYPE(bld, subsystem) == 'DISABLED':
+        return
+
+    # derive the on-disk module name by stripping the subsystem
+    # prefix (with or without a leading "lib") from the target name
+    realname = modname
+    deps += ' ' + subsystem
+    while realname.startswith("lib"+subsystem+"_"):
+        realname = realname[len("lib"+subsystem+"_"):]
+    while realname.startswith(subsystem+"_"):
+        realname = realname[len(subsystem+"_"):]
+
+    build_name = "%s_module_%s" % (subsystem, realname)
+
+    realname = bld.make_libname(realname)
+    while realname.startswith("lib"):
+        realname = realname[len("lib"):]
+
+    build_link_name = "modules/%s/%s" % (subsystem, realname)
+
+    # map the module's init function symbol onto the generic
+    # samba_init_module entry point expected by the module loader
+    if init_function:
+        cflags += " -D%s=%s" % (init_function, module_init_name)
+
+    bld.SAMBA_LIBRARY(modname,
+                      source,
+                      deps=deps,
+                      includes=includes,
+                      cflags=cflags,
+                      cflags_end=cflags_end,
+                      realname = realname,
+                      autoproto = autoproto,
+                      local_include=local_include,
+                      global_include=global_include,
+                      vars=vars,
+                      bundled_name=build_name,
+                      link_name=build_link_name,
+                      install_path="${MODULESDIR}/%s" % subsystem,
+                      pyembed=pyembed,
+                      manpages=manpages,
+                      allow_undefined_symbols=allow_undefined_symbols,
+                      allow_warnings=allow_warnings,
+                      private_library=True,
+                      install=install
+                      )
+
+
+Build.BuildContext.SAMBA_MODULE = SAMBA_MODULE
+
+#################################################################
+def SAMBA_PLUGIN(bld, pluginname, source,
+                 deps='',
+                 includes='',
+                 vnum=None,
+                 soname=None,
+                 cflags='',
+                 ldflags='',
+                 local_include=True,
+                 global_include=True,
+                 vars=None,
+                 subdir=None,
+                 realname=None,
+                 autoproto=None,
+                 autoproto_extra_source='',
+                 install_path=None,
+                 install=True,
+                 manpages=None,
+                 require_builtin_deps=True,
+                 allow_undefined_symbols=False,
+                 enabled=True):
+    '''define an external plugin.
+
+    A thin wrapper around SAMBA_LIBRARY with target_type='PLUGIN':
+    symbols are hidden, no public headers or pkg-config files are
+    produced, and the build artifact is linked under plugins/<realname>.
+    '''
+
+    # realname determines both the installed filename and the in-tree
+    # link name, so it is mandatory
+    bld.ASSERT(realname, "You must specify a realname for SAMBA_PLUGIN(%s)" % pluginname)
+
+    source = bld.EXPAND_VARIABLES(source, vars=vars)
+    if subdir:
+        source = bld.SUBDIR(subdir, source)
+
+    build_name = "_plugin_%s" % (pluginname)
+    build_link_name = "plugins/%s" % (realname)
+
+    bld.SAMBA_LIBRARY(pluginname,
+                      source,
+                      bundled_name=build_name,
+                      link_name=build_link_name,
+                      target_type='PLUGIN',
+                      deps=deps,
+                      includes=includes,
+                      vnum=vnum,
+                      soname=soname,
+                      cflags=cflags,
+                      ldflags=ldflags,
+                      realname=realname,
+                      autoproto=autoproto,
+                      autoproto_extra_source=autoproto_extra_source,
+                      local_include=local_include,
+                      global_include=global_include,
+                      vars=vars,
+                      group='main',
+                      install_path=install_path,
+                      install=install,
+                      manpages=manpages,
+                      require_builtin_deps=require_builtin_deps,
+                      builtin_cflags=cflags,
+                      hide_symbols=True,
+                      public_headers=[],
+                      public_headers_install=False,
+                      pc_files=[],
+                      allow_undefined_symbols=allow_undefined_symbols,
+                      allow_warnings=False,
+                      enabled=enabled)
+Build.BuildContext.SAMBA_PLUGIN = SAMBA_PLUGIN
+
+def __SAMBA_SUBSYSTEM_BUILTIN(bld, builtin_target, source,
+                              deps='',
+                              public_deps='',
+                              includes='',
+                              public_headers=None,
+                              public_headers_install=True,
+                              private_headers=None,
+                              header_path=None,
+                              builtin_cflags='',
+                              builtin_cflags_end=None,
+                              group='main',
+                              autoproto=None,
+                              autoproto_extra_source='',
+                              depends_on='',
+                              local_include=True,
+                              global_include=True,
+                              allow_warnings=False):
+    # Internal helper: create the hidden '.builtin.objlist' companion of a
+    # subsystem (see SAMBA_SUBSYSTEM's provide_builtin_linking).  It is a
+    # regular SAMBA_SUBSYSTEM with target_type='BUILTIN', hidden symbols
+    # and __require_builtin_deps set.
+
+    bld.ASSERT(builtin_target.endswith('.builtin.objlist'),
+               "builtin_target[%s] does not end with '.builtin.objlist'" %
+               (builtin_target))
+    return bld.SAMBA_SUBSYSTEM(builtin_target, source,
+                               deps=deps,
+                               public_deps=public_deps,
+                               includes=includes,
+                               public_headers=public_headers,
+                               public_headers_install=public_headers_install,
+                               private_headers=private_headers,
+                               header_path=header_path,
+                               cflags=builtin_cflags,
+                               cflags_end=builtin_cflags_end,
+                               hide_symbols=True,
+                               group=group,
+                               target_type='BUILTIN',
+                               autoproto=autoproto,
+                               autoproto_extra_source=autoproto_extra_source,
+                               depends_on=depends_on,
+                               local_include=local_include,
+                               global_include=global_include,
+                               allow_warnings=allow_warnings,
+                               __require_builtin_deps=True)
+
+#################################################################
+def SAMBA_SUBSYSTEM(bld, modname, source,
+                    deps='',
+                    public_deps='',
+                    __force_empty=False,
+                    includes='',
+                    public_headers=None,
+                    public_headers_install=True,
+                    private_headers=None,
+                    header_path=None,
+                    cflags='',
+                    cflags_end=None,
+                    group='main',
+                    target_type='SUBSYSTEM',
+                    init_function_sentinel=None,
+                    autoproto=None,
+                    autoproto_extra_source='',
+                    depends_on='',
+                    local_include=True,
+                    local_include_first=True,
+                    global_include=True,
+                    subsystem_name=None,
+                    enabled=True,
+                    use_hostcc=False,
+                    use_global_deps=True,
+                    vars=None,
+                    subdir=None,
+                    hide_symbols=False,
+                    __require_builtin_deps=False,
+                    provide_builtin_linking=False,
+                    builtin_cflags='',
+                    allow_warnings=False,
+                    pyext=False,
+                    pyembed=False):
+    '''define a Samba subsystem'''
+
+    # We support:
+    # - SUBSYSTEM: a normal subsystem from SAMBA_SUBSYSTEM()
+    # - BUILTIN: a hidden subsystem from __SAMBA_SUBSYSTEM_BUILTIN()
+    if target_type not in ['SUBSYSTEM', 'BUILTIN']:
+        raise Errors.WafError("target_type[%s] not supported in SAMBA_SUBSYSTEM('%s')" %
+                              (target_type, modname))
+
+    if not enabled:
+        SET_TARGET_TYPE(bld, modname, 'DISABLED')
+        return
+
+    # remember empty subsystems, so we can strip the dependencies
+    if ((source == '') or (source == [])):
+        if not __force_empty and deps == '' and public_deps == '':
+            SET_TARGET_TYPE(bld, modname, 'EMPTY')
+            return
+        # a sourceless subsystem with deps still needs something to
+        # compile, so generate a placeholder empty C file
+        empty_c = modname + '.empty.c'
+        bld.SAMBA_GENERATOR('%s_empty_c' % modname,
+                            rule=generate_empty_file,
+                            target=empty_c)
+        source=empty_c
+
+    if not SET_TARGET_TYPE(bld, modname, target_type):
+        return
+
+    source = bld.EXPAND_VARIABLES(source, vars=vars)
+    if subdir:
+        source = bld.SUBDIR(subdir, source)
+    source = unique_list(TO_LIST(source))
+
+    deps += ' ' + public_deps
+
+    bld.SET_BUILD_GROUP(group)
+
+    features = 'c'
+    if pyext:
+        features += ' pyext'
+    if pyembed:
+        features += ' pyembed'
+
+    t = bld(
+        features       = features,
+        source         = source,
+        target         = modname,
+        samba_cflags   = CURRENT_CFLAGS(bld, modname, cflags,
+                                        allow_warnings=allow_warnings,
+                                        use_hostcc=use_hostcc,
+                                        hide_symbols=hide_symbols),
+        depends_on     = depends_on,
+        samba_deps     = TO_LIST(deps),
+        samba_includes = includes,
+        local_include  = local_include,
+        local_include_first  = local_include_first,
+        global_include = global_include,
+        samba_subsystem= subsystem_name,
+        samba_use_hostcc = use_hostcc,
+        samba_use_global_deps = use_global_deps,
+        samba_require_builtin_deps = __require_builtin_deps,
+        samba_builtin_subsystem = None,
+        )
+
+    if cflags_end is not None:
+        t.samba_cflags.extend(TO_LIST(cflags_end))
+
+    if autoproto is not None:
+        bld.SAMBA_AUTOPROTO(autoproto, source + TO_LIST(autoproto_extra_source))
+    if public_headers is not None:
+        bld.PUBLIC_HEADERS(public_headers, header_path=header_path,
+                           public_headers_install=public_headers_install)
+
+    if provide_builtin_linking:
+
+        # NOTE(review): in the three raises below the '%' binds only to the
+        # second string literal, which has no conversion specifier, so the
+        # formatting itself raises TypeError instead of producing the
+        # intended message; the concatenation should be parenthesized.
+        if use_hostcc:
+            raise Errors.WafError("subsystem[%s] provide_builtin_linking=True " +
+                                  "not allowed with use_hostcc=True" %
+                                  modname)
+
+        if pyext or pyembed:
+            raise Errors.WafError("subsystem[%s] provide_builtin_linking=True " +
+                                  "not allowed with pyext=True nor pyembed=True" %
+                                  modname)
+
+        if __require_builtin_deps:
+            raise Errors.WafError("subsystem[%s] provide_builtin_linking=True " +
+                                  "not allowed with __require_builtin_deps=True" %
+                                  modname)
+
+        # build a parallel hidden object list that other targets can
+        # link in directly instead of depending on this subsystem
+        builtin_target = modname + '.builtin.objlist'
+        tbuiltin = __SAMBA_SUBSYSTEM_BUILTIN(bld, builtin_target, source,
+                                             deps=deps,
+                                             public_deps=public_deps,
+                                             includes=includes,
+                                             header_path=header_path,
+                                             builtin_cflags=builtin_cflags,
+                                             builtin_cflags_end='-D_PUBLIC_=_PRIVATE_',
+                                             group=group,
+                                             depends_on=depends_on,
+                                             local_include=local_include,
+                                             global_include=global_include,
+                                             allow_warnings=allow_warnings)
+        t.samba_builtin_subsystem = tbuiltin
+
+    return t
+
+
+Build.BuildContext.SAMBA_SUBSYSTEM = SAMBA_SUBSYSTEM
+
+
+def SAMBA_GENERATOR(bld, name, rule, source='', target='',
+                    group='generators', enabled=True,
+                    public_headers=None,
+                    public_headers_install=True,
+                    private_headers=None,
+                    header_path=None,
+                    vars=None,
+                    dep_vars=None,
+                    always=False):
+    '''A generic source generator target
+
+    rule may be either a shell command string or a Python callable;
+    string rules are run under "set -e" so a failing command fails
+    the build.  vars are exposed to dependency tracking via the
+    SAMBA_GENERATOR_VARS env entry.
+    '''
+
+    if dep_vars is None:
+        dep_vars = []
+    if not SET_TARGET_TYPE(bld, name, 'GENERATOR'):
+        return
+
+    if not enabled:
+        return
+
+    # make the task re-run when the rule or the generator vars change
+    dep_vars = TO_LIST(dep_vars)
+    dep_vars.append('ruledeps')
+    dep_vars.append('SAMBA_GENERATOR_VARS')
+
+    shell=isinstance(rule, str)
+
+    # This ensures that if the command (executed in the shell) fails
+    # (returns non-zero), the build fails
+    if shell:
+        rule = "set -e; " + rule
+
+    bld.SET_BUILD_GROUP(group)
+    t = bld(
+        rule=rule,
+        source=bld.EXPAND_VARIABLES(source, vars=vars),
+        shell=shell,
+        target=target,
+        update_outputs=True,
+        before='c',
+        ext_out='.c',
+        samba_type='GENERATOR',
+        dep_vars = dep_vars,
+        name=name)
+
+    if vars is None:
+        vars = {}
+    t.env.SAMBA_GENERATOR_VARS = vars
+
+    if always:
+        t.always = True
+
+    if public_headers is not None:
+        bld.PUBLIC_HEADERS(public_headers, header_path=header_path,
+                           public_headers_install=public_headers_install)
+    return t
+Build.BuildContext.SAMBA_GENERATOR = SAMBA_GENERATOR
+
+
+
+@Utils.run_once
+def SETUP_BUILD_GROUPS(bld):
+    '''setup build groups used to ensure that the different build
+    phases happen consecutively'''
+    bld.p_ln = bld.srcnode # we do want to see all targets!
+    bld.env['USING_BUILD_GROUPS'] = True
+    # groups are declared in execution order; SET_BUILD_GROUP() later
+    # assigns each task generator to one of them
+    bld.add_group('setup')
+    bld.add_group('generators')
+    bld.add_group('hostcc_base_build_source')
+    bld.add_group('hostcc_base_build_main')
+    bld.add_group('hostcc_build_source')
+    bld.add_group('hostcc_build_main')
+    bld.add_group('vscripts')
+    bld.add_group('base_libraries')
+    bld.add_group('build_source')
+    bld.add_group('prototypes')
+    bld.add_group('headers')
+    bld.add_group('main')
+    bld.add_group('symbolcheck')
+    bld.add_group('syslibcheck')
+    bld.add_group('final')
+Build.BuildContext.SETUP_BUILD_GROUPS = SETUP_BUILD_GROUPS
+
+
+def SET_BUILD_GROUP(bld, group):
+    '''set the current build group
+
+    no-op unless SETUP_BUILD_GROUPS() has been called on this context
+    '''
+    if not 'USING_BUILD_GROUPS' in bld.env:
+        return
+    bld.set_group(group)
+Build.BuildContext.SET_BUILD_GROUP = SET_BUILD_GROUP
+
+
+
+def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None):
+    '''used to copy scripts from the source tree into the build directory
+    for use by selftest
+
+    scripts are not copied but symlinked; existing correct symlinks
+    are left alone, stale ones are replaced
+    '''
+
+    source = bld.path.ant_glob(pattern, flat=True)
+
+    bld.SET_BUILD_GROUP('build_source')
+    for s in TO_LIST(source):
+        iname = s
+        if installname is not None:
+            iname = installname
+        target = os.path.join(installdir, iname)
+        # NOTE(review): abspath(bld.env) is the waf-1.x node API;
+        # presumably kept for compatibility — confirm against the
+        # bundled waf version
+        tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target))
+        mkdir_p(tgtdir)
+        link_src = os.path.normpath(os.path.join(bld.path.abspath(), s))
+        link_dst = os.path.join(tgtdir, os.path.basename(iname))
+        # skip if the link already points at the right place
+        if os.path.islink(link_dst) and os.readlink(link_dst) == link_src:
+            continue
+        if os.path.islink(link_dst):
+            os.unlink(link_dst)
+        Logs.info("symlink: %s -> %s/%s" % (s, installdir, iname))
+        symlink(link_src, link_dst)
+Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT
+
+
+def copy_and_fix_python_path(task):
+    # Task rule: copy a python script to its .inst form, rewriting the
+    # in-tree sys.path line to the install-time module paths and the
+    # shebang to the configured python interpreter.
+    pattern='sys.path.insert(0, "bin/python")'
+    # if the install dirs are already on the default sys.path the
+    # insert line can be dropped entirely
+    if task.env["PYTHONARCHDIR"] in sys.path and task.env["PYTHONDIR"] in sys.path:
+        replacement = ""
+    elif task.env["PYTHONARCHDIR"] == task.env["PYTHONDIR"]:
+        replacement="""sys.path.insert(0, "%s")""" % task.env["PYTHONDIR"]
+    else:
+        replacement="""sys.path.insert(0, "%s")
+sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"])
+
+    if task.env["PYTHON"][0].startswith("/"):
+        replacement_shebang = "#!%s\n" % task.env["PYTHON"][0]
+    else:
+        replacement_shebang = "#!/usr/bin/env %s\n" % task.env["PYTHON"][0]
+
+    installed_location=task.outputs[0].bldpath(task.env)
+    # NOTE(review): file handles are closed explicitly rather than via
+    # 'with'; source_file leaks if an exception occurs mid-copy
+    source_file = open(task.inputs[0].srcpath(task.env))
+    installed_file = open(installed_location, 'w')
+    lineno = 0
+    for line in source_file:
+        newline = line
+        # only a first-line shebang is rewritten
+        if (lineno == 0 and
+            line[:2] == "#!"):
+            newline = replacement_shebang
+        elif pattern in line:
+            newline = line.replace(pattern, replacement)
+        installed_file.write(newline)
+        lineno = lineno + 1
+    installed_file.close()
+    os.chmod(installed_location, 0o755)
+    # waf task rules return 0 on success
+    return 0
+
+def copy_and_fix_perl_path(task):
+    # Task rule: perl counterpart of copy_and_fix_python_path — rewrite
+    # the 'use lib' line to the installed perl lib dir and fix the shebang.
+    pattern='use lib "$RealBin/lib";'
+
+    replacement = ""
+    if not task.env["PERL_LIB_INSTALL_DIR"] in task.env["PERL_INC"]:
+        replacement = 'use lib "%s";' % task.env["PERL_LIB_INSTALL_DIR"]
+
+    # NOTE(review): this tests only the first element/character of
+    # env["PERL"] against "/" and interpolates the whole value, unlike
+    # the python variant which uses PYTHON[0].startswith("/") — confirm
+    # whether PERL is a list here
+    if task.env["PERL"][0] == "/":
+        replacement_shebang = "#!%s\n" % task.env["PERL"]
+    else:
+        replacement_shebang = "#!/usr/bin/env %s\n" % task.env["PERL"]
+
+    installed_location=task.outputs[0].bldpath(task.env)
+    source_file = open(task.inputs[0].srcpath(task.env))
+    installed_file = open(installed_location, 'w')
+    lineno = 0
+    for line in source_file:
+        newline = line
+        # shebang is only rewritten when the user explicitly chose a perl
+        if lineno == 0 and task.env["PERL_SPECIFIED"] is True and line[:2] == "#!":
+            newline = replacement_shebang
+        elif pattern in line:
+            newline = line.replace(pattern, replacement)
+        installed_file.write(newline)
+        lineno = lineno + 1
+    installed_file.close()
+    os.chmod(installed_location, 0o755)
+    return 0
+
+
+def install_file(bld, destdir, file, chmod=MODE_644, flat=False,
+                 python_fixup=False, perl_fixup=False,
+                 destname=None, base_name=None):
+    '''install a file
+
+    destdir may contain build variables (expanded here); flat drops the
+    source directory part of the destination name.  With python_fixup or
+    perl_fixup the file is first copied through a generator task that
+    rewrites its interpreter paths, and the '.inst' copy is installed.
+    '''
+    if not isinstance(file, str):
+        # accept waf Node objects as well as path strings
+        file = file.abspath()
+    destdir = bld.EXPAND_VARIABLES(destdir)
+    if not destname:
+        destname = file
+    if flat:
+        destname = os.path.basename(destname)
+    dest = os.path.join(destdir, destname)
+    if python_fixup:
+        # fix the path python will use to find Samba modules
+        inst_file = file + '.inst'
+        bld.SAMBA_GENERATOR('python_%s' % destname,
+                            rule=copy_and_fix_python_path,
+                            dep_vars=["PYTHON","PYTHON_SPECIFIED","PYTHONDIR","PYTHONARCHDIR"],
+                            source=file,
+                            target=inst_file)
+        file = inst_file
+    if perl_fixup:
+        # fix the path perl will use to find Samba modules
+        inst_file = file + '.inst'
+        bld.SAMBA_GENERATOR('perl_%s' % destname,
+                            rule=copy_and_fix_perl_path,
+                            dep_vars=["PERL","PERL_SPECIFIED","PERL_LIB_INSTALL_DIR"],
+                            source=file,
+                            target=inst_file)
+        file = inst_file
+    if base_name:
+        file = os.path.join(base_name, file)
+    bld.install_as(dest, file, chmod=chmod)
+
+
+def INSTALL_FILES(bld, destdir, files, chmod=MODE_644, flat=False,
+                  python_fixup=False, perl_fixup=False,
+                  destname=None, base_name=None):
+    '''install a set of files
+
+    files may be a string or list; each entry is passed unchanged to
+    install_file() — note destname, if given, applies to every entry
+    '''
+    for f in TO_LIST(files):
+        install_file(bld, destdir, f, chmod=chmod, flat=flat,
+                     python_fixup=python_fixup, perl_fixup=perl_fixup,
+                     destname=destname, base_name=base_name)
+Build.BuildContext.INSTALL_FILES = INSTALL_FILES
+
+
+def INSTALL_WILDCARD(bld, destdir, pattern, chmod=MODE_644, flat=False,
+                     python_fixup=False, exclude=None, trim_path=None):
+    '''install a set of files matching a wildcard pattern
+
+    trim_path strips a leading directory from each match (and is passed
+    on as base_name so the files are still found); exclude is a single
+    fnmatch pattern removing matches from the set
+    '''
+    files=TO_LIST(bld.path.ant_glob(pattern, flat=True))
+    if trim_path:
+        files2 = []
+        for f in files:
+            files2.append(os.path.relpath(f, trim_path))
+        files = files2
+
+    if exclude:
+        # iterate over a copy since we mutate the list
+        for f in files[:]:
+            if fnmatch.fnmatch(f, exclude):
+                files.remove(f)
+    INSTALL_FILES(bld, destdir, files, chmod=chmod, flat=flat,
+                  python_fixup=python_fixup, base_name=trim_path)
+Build.BuildContext.INSTALL_WILDCARD = INSTALL_WILDCARD
+
+def INSTALL_DIR(bld, path, chmod=0o755):
+    """Install a directory if it doesn't exist, always set permissions."""
+
+    if not path:
+        return []
+
+    destpath = bld.EXPAND_VARIABLES(path)
+    # honour --destdir staged installs
+    if Options.options.destdir:
+        destpath = os.path.join(Options.options.destdir, destpath.lstrip(os.sep))
+
+    # only act during 'install' (is_install > 0), not 'uninstall'
+    if bld.is_install > 0:
+        if not os.path.isdir(destpath):
+            try:
+                Logs.info('* create %s', destpath)
+                os.makedirs(destpath)
+                os.chmod(destpath, chmod)
+            except OSError as e:
+                # re-check: a concurrent install may have created it,
+                # in which case the OSError is benign
+                if not os.path.isdir(destpath):
+                    raise Errors.WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
+Build.BuildContext.INSTALL_DIR = INSTALL_DIR
+
+def INSTALL_DIRS(bld, destdir, dirs, chmod=0o755):
+    '''install a set of directories
+
+    both destdir and dirs may contain build variables; each entry in
+    dirs is created under destdir via INSTALL_DIR()
+    '''
+    destdir = bld.EXPAND_VARIABLES(destdir)
+    dirs = bld.EXPAND_VARIABLES(dirs)
+    for d in TO_LIST(dirs):
+        INSTALL_DIR(bld, os.path.join(destdir, d), chmod)
+Build.BuildContext.INSTALL_DIRS = INSTALL_DIRS
+
+
+def MANPAGES(bld, manpages, install):
+    '''build and install manual pages
+
+    manpages is a whitespace-separated list of page names whose last
+    character is the man section (e.g. "talloc.3"); each is built from
+    a matching .xml docbook source via xsltproc
+    '''
+    bld.env.MAN_XSL = 'http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl'
+    for m in manpages.split():
+        source = m + '.xml'
+        bld.SAMBA_GENERATOR(m,
+                            source=source,
+                            target=m,
+                            group='final',
+                            rule='${XSLTPROC} --xinclude -o ${TGT} --nonet ${MAN_XSL} ${SRC}'
+                            )
+        if install:
+            # m[-1] is the man section digit from the page name
+            bld.INSTALL_FILES('${MANDIR}/man%s' % m[-1], m, flat=True)
+Build.BuildContext.MANPAGES = MANPAGES
+
+def SAMBAMANPAGES(bld, manpages, extra_source=None):
+    '''build and install manual pages
+
+    Samba-specific variant of MANPAGES: the docbook source is first run
+    through the expand-sambadoc stylesheet, using the local XML catalogs
+    so no network access is required.  Pages are always installed.
+    '''
+    bld.env.SAMBA_EXPAND_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/expand-sambadoc.xsl'
+    bld.env.SAMBA_MAN_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/man.xsl'
+    bld.env.SAMBA_CATALOG = bld.bldnode.abspath() + '/docs-xml/build/catalog.xml'
+    bld.env.SAMBA_CATALOGS = os.getenv('XML_CATALOG_FILES', 'file:///etc/xml/catalog file:///usr/local/share/xml/catalog') + ' file://' + bld.env.SAMBA_CATALOG
+
+    for m in manpages.split():
+        source = [m + '.xml']
+        if extra_source is not None:
+            source = [source, extra_source]
+        # ${SRC[1]}, ${SRC[2]} and ${SRC[3]} are not referenced in the
+        # SAMBA_GENERATOR but trigger the dependency calculation so
+        # ensures that manpages are rebuilt when these change.
+        source += ['build/DTD/samba.build.pathconfig', 'build/DTD/samba.entities', 'build/DTD/samba.build.version']
+        bld.SAMBA_GENERATOR(m,
+                            source=source,
+                            target=m,
+                            group='final',
+                            dep_vars=['SAMBA_MAN_XSL', 'SAMBA_EXPAND_XSL', 'SAMBA_CATALOG'],
+                            rule='''XML_CATALOG_FILES="${SAMBA_CATALOGS}"
+                                    export XML_CATALOG_FILES
+                                    ${XSLTPROC} --xinclude --stringparam noreference 0 -o ${TGT}.xml --nonet ${SAMBA_EXPAND_XSL} ${SRC[0].abspath(env)}
+                                    ${XSLTPROC} --nonet -o ${TGT} ${SAMBA_MAN_XSL} ${TGT}.xml'''
+                            )
+        bld.INSTALL_FILES('${MANDIR}/man%s' % m[-1], m, flat=True)
+Build.BuildContext.SAMBAMANPAGES = SAMBAMANPAGES
+
+@after('apply_link')
+@feature('cshlib')
+def apply_bundle_remove_dynamiclib_patch(self):
+    # For macOS bundle builds without a version number, strip the
+    # -dynamiclib/-single_module link flags that the cshlib feature adds.
+    if self.env['MACBUNDLE'] or getattr(self,'mac_bundle',False):
+        if not getattr(self,'vnum',None):
+            try:
+                # NOTE(review): if only the first flag is present the
+                # second remove() raises and is swallowed, leaving a
+                # partial removal — presumably both are always added
+                # together; confirm
+                self.env['LINKFLAGS'].remove('-dynamiclib')
+                self.env['LINKFLAGS'].remove('-single_module')
+            except ValueError:
+                pass
diff --git a/buildtools/wafsamba/wscript b/buildtools/wafsamba/wscript
new file mode 100644
index 0000000..99a6237
--- /dev/null
+++ b/buildtools/wafsamba/wscript
@@ -0,0 +1,737 @@
+#!/usr/bin/env python
+
+# this is a base set of waf rules that everything else pulls in first
+
+import os, sys
+from waflib import Configure, Logs, Options, Utils, Context, Errors
+import wafsamba
+from samba_utils import symlink
+from optparse import SUPPRESS_HELP
+
# refuse to run unless a fixed hash seed is in effect.
# NOTE(review): presumably this keeps waf's internal ordering/signatures
# deterministic across runs; the ./configure wrapper is expected to export
# PYTHONHASHSEED=1 before invoking waf - confirm against that script.
phs = os.environ.get("PYTHONHASHSEED", None)
if phs != "1":
    raise Errors.WafError('''PYTHONHASHSEED=1 missing! Don't use waf directly, use ./configure and make!''')

# this forces configure to be re-run if any of the configure
# sections of the build scripts change. We have to check
# for this in sys.argv as options have not yet been parsed when
# we need to set this. This is off by default until some issues
# are resolved related to WAFCACHE. It will need a lot of testing
# before it is enabled by default.
if '--enable-auto-reconfigure' in sys.argv:
    Configure.autoconfig = 'clobber'
+
def default_value(option, default=''):
    '''Return the parsed value for *option* if it was registered,
    otherwise *default*.

    Looks the name up directly in the parsed Options namespace; a stored
    value is returned even if it is falsy (only a missing key falls back
    to the default).
    '''
    return Options.options.__dict__.get(option, default)
+
def options(opt):
    '''register the command-line options shared by all wafsamba projects

    Declares the library-handling, developer, cross-compilation, autoconf
    compatibility and dist option groups on the waf options context.
    '''
    opt.load('compiler_cc')

    opt.load('gnu_dirs')

    gr = opt.option_group('library handling options')

    gr.add_option('--bundled-libraries',
                  help=(f'''comma separated list of bundled libraries.

{Context.g_module.APPNAME} includes copies of externally maintained
system libraries (such as popt, cmocka) as well as Samba-maintained
libraries that can be found on the system already (such as talloc,
tdb).

This option, most useful for packagers, controls if each library
should be forced to be obtained from inside Samba (bundled), forced to
be obtained from the system (bundling disabled, ensuring that
dependency errors are not silently missed) or if that choice should be
automatic (best for end users).

May include !LIBNAME to disable bundling a library.

Can be 'NONE' or 'ALL' [auto]'''),
                  action="store", dest='BUNDLED_LIBS', default='')

    gr.add_option('--private-libraries',
                  help=(f'''comma separated list of normally public libraries to build instead as private libraries.

By default {Context.g_module.APPNAME} will publish a number of public
libraries for use by other software. For Samba this would include
libwbclient, libsmbclient and others.

This allows that to be disabled, to ensure that other software does
not use these libraries and they are placed in a private filesystem
prefix.

May include !LIBNAME to disable making a library private in order to
limit the effect of 'ALL' '''),
                  action="store", dest='PRIVATE_LIBS', default='')

    extension_default = default_value('PRIVATE_EXTENSION_DEFAULT')
    gr.add_option('--private-library-extension',
                  help=("name extension for private libraries [%s]" % extension_default),
                  action="store", dest='PRIVATE_EXTENSION', default=extension_default)

    extension_exception = default_value('PRIVATE_EXTENSION_EXCEPTION')
    gr.add_option('--private-extension-exception',
                  help=("comma separated list of libraries to not apply extension to [%s]" % extension_exception),
                  action="store", dest='PRIVATE_EXTENSION_EXCEPTION', default=extension_exception)

    builtin_default = default_value('BUILTIN_LIBRARIES_DEFAULT')
    gr.add_option('--builtin-libraries', help=(
f'''comma separated list of libraries to build directly into binaries.

By default {Context.g_module.APPNAME} will build a large number of
shared libraries, to reduce binary size. This overrides this
behaviour and essentially statically links the specified libraries into
each binary [{builtin_default}]'''),
                  action="store",
                  dest='BUILTIN_LIBRARIES', default=builtin_default)

    gr.add_option('--minimum-library-version',
                  help=(
f'''list of minimum system library versions for otherwise bundled
libraries.

{Context.g_module.APPNAME} by default requires that, in order to match
what is tested in our continuous integration (CI) test-suite, that the
versions of libraries that we include match that found on the system,
before we will select not to 'bundle'.

This option, possibly useful for packagers, allows that specified
version to be overridden (say, if it is absolutely known that the
newer version included in this tarball has no relevant changes).

Use this with extreme care

(LIBNAME1:version,LIBNAME2:version)'''),
                  action="store", dest='MINIMUM_LIBRARY_VERSION', default='')

    gr.add_option('--disable-rpath',
                  help=("Disable use of rpath for build binaries"),
                  action="store_true", dest='disable_rpath_build', default=False)
    gr.add_option('--disable-rpath-install',
                  help=("Disable use of rpath for library path in installed files"),
                  action="store_true", dest='disable_rpath_install', default=False)
    gr.add_option('--disable-rpath-private-install',
                  help=("Disable use of rpath for private library path in installed files"),
                  action="store_true", dest='disable_rpath_private_install', default=False)
    gr.add_option('--nonshared-binary',
                  help=(
f'''Disable use of shared libraries internal to {Context.g_module.APPNAME} for the listed binaries.

The resulting binaries are 'statically linked' with regard to components provided by
{Context.g_module.APPNAME}, but remain dynamically linked to (eg) libc.so and libgnutls.so

Currently the only tested value is 'smbtorture,smbd/smbd' for Samba'''),
                  action="store", dest='NONSHARED_BINARIES', default='')
    gr.add_option('--disable-symbol-versions',
                  help=("Disable use of the --version-script linker option"),
                  action="store_true", dest='disable_symbol_versions', default=False)

    opt.add_option('--with-modulesdir',
                   help=("modules directory [PREFIX/modules]"),
                   action="store", dest='MODULESDIR', default='${PREFIX}/modules')

    opt.add_option('--with-privatelibdir',
                   help=("private library directory [PREFIX/lib/%s]" % Context.g_module.APPNAME),
                   action="store", dest='PRIVATELIBDIR', default=None)

    opt.add_option('--with-libiconv',
                   help='additional directory to search for libiconv',
                   action='store', dest='iconv_open', default='/usr/local',
                   match=['Checking for library iconv', 'Checking for iconv_open', 'Checking for header iconv.h'])
    opt.add_option('--without-gettext',
                   help=("Disable use of gettext"),
                   action="store_true", dest='disable_gettext', default=False)

    gr = opt.option_group('developer options')

    gr.add_option('-C',
                  help='enable configure caching',
                  action='store_true', dest='enable_configure_cache')
    gr.add_option('--enable-auto-reconfigure',
                  help='enable automatic reconfigure on build',
                  action='store_true', dest='enable_auto_reconfigure')
    gr.add_option('--enable-debug',
                  help=("Turn on debugging symbols"),
                  action="store_true", dest='debug', default=False)
    gr.add_option('--enable-developer',
                  help=("Turn on developer warnings and debugging"),
                  action="store_true", dest='developer', default=False)
    gr.add_option('--pidl-developer',
                  help=("annotate PIDL-generated code for developers"),
                  action="store_true", dest='pidl_developer', default=False)
    gr.add_option('--disable-warnings-as-errors',
                  help=("Do not treat all warnings as errors (disable -Werror)"),
                  action="store_true", dest='disable_warnings_as_errors', default=False)
    # consistency fix: registered on the developer group like its
    # neighbours (was opt.add_option, which only changed where the option
    # appeared in --help output, not how it parses)
    gr.add_option('--enable-coverage',
                  help=("enable options necessary for code coverage "
                        "reporting on selftest (default=no)"),
                  action="store_true", dest='enable_coverage', default=False)
    gr.add_option('--fatal-errors',
                  help=("Stop compilation on first error (enable -Wfatal-errors)"),
                  action="store_true", dest='fatal_errors', default=False)
    gr.add_option('--enable-gccdeps',
                  help=("Enable use of gcc -MD dependency module"),
                  action="store_true", dest='enable_gccdeps', default=True)
    gr.add_option('--pedantic',
                  help=("Enable even more compiler warnings"),
                  action='store_true', dest='pedantic', default=False)
    gr.add_option('--git-local-changes',
                  help=("mark version with + if local git changes"),
                  action='store_true', dest='GIT_LOCAL_CHANGES', default=False)
    gr.add_option('--address-sanitizer',
                  help=("Enable address sanitizer compile and linker flags"),
                  action="store_true", dest='address_sanitizer', default=False)
    gr.add_option('--undefined-sanitizer',
                  help=("Enable undefined behaviour sanitizer compile and linker flags"),
                  action="store_true",
                  dest='undefined_sanitizer',
                  default=False)
    gr.add_option('--memory-sanitizer',
                  help=("Enable memory behaviour sanitizer compile and linker flags"),
                  action="store_true",
                  dest='memory_sanitizer',
                  default=False)
    gr.add_option('--enable-libfuzzer',
                  help=("Build fuzzing binaries (use ADDITIONAL_CFLAGS to specify compiler options for libFuzzer or use CC=honggfuzz/hfuzz-cc)"),
                  action="store_true", dest='enable_libfuzzer', default=False)
    gr.add_option('--enable-afl-fuzzer',
                  help=("Build fuzzing binaries AFL-style (typically use with CC=afl-gcc)"),
                  action="store_true", dest='enable_afl_fuzzer', default=False)

    # Fuzz targets may need additional LDFLAGS that we can't use on
    # internal binaries like asn1_compile

    gr.add_option('--fuzz-target-ldflags',
                  help=("Linker flags to be used when building fuzz targets"),
                  action="store", dest='FUZZ_TARGET_LDFLAGS', default='')

    gr.add_option('--abi-check',
                  help=("Check ABI signatures for libraries"),
                  action='store_true', dest='ABI_CHECK', default=False)
    gr.add_option('--abi-check-disable',
                  help=("Disable ABI checking (used with --enable-developer)"),
                  action='store_true', dest='ABI_CHECK_DISABLE', default=False)
    gr.add_option('--abi-update',
                  help=("Update ABI signature files for libraries"),
                  action='store_true', dest='ABI_UPDATE', default=False)

    gr.add_option('--show-deps',
                  help=("Show dependency tree for the given target"),
                  dest='SHOWDEPS', default='')

    gr.add_option('--symbol-check',
                  help=("check symbols in object files against project rules"),
                  action='store_true', dest='SYMBOLCHECK', default=False)

    gr.add_option('--dup-symbol-check',
                  help=("check for duplicate symbols in object files and system libs (must be configured with --enable-developer)"),
                  action='store_true', dest='DUP_SYMBOLCHECK', default=False)

    gr.add_option('--why-needed',
                  help=("TARGET:DEPENDENCY check why TARGET needs DEPENDENCY"),
                  action='store', type='str', dest='WHYNEEDED', default=None)

    gr.add_option('--show-duplicates',
                  help=("Show objects which are included in multiple binaries or libraries"),
                  action='store_true', dest='SHOW_DUPLICATES', default=False)

    gr = opt.add_option_group('cross compilation options')

    gr.add_option('--cross-compile',
                  help=("configure for cross-compilation"),
                  action='store_true', dest='CROSS_COMPILE', default=False)
    gr.add_option('--cross-execute',
                  help=("command prefix to use for cross-execution in configure"),
                  action='store', dest='CROSS_EXECUTE', default='')
    gr.add_option('--cross-answers',
                  help=("answers to cross-compilation configuration (auto modified)"),
                  action='store', dest='CROSS_ANSWERS', default='')
    gr.add_option('--hostcc',
                  help=("set host compiler when cross compiling"),
                  action='store', dest='HOSTCC', default=False)

    # we use SUPPRESS_HELP for these, as they are ignored, and are there only
    # to allow existing RPM spec files to work
    opt.add_option('--build',
                   help=SUPPRESS_HELP,
                   action='store', dest='AUTOCONF_BUILD', default='')
    opt.add_option('--host',
                   help=SUPPRESS_HELP,
                   action='store', dest='AUTOCONF_HOST', default='')
    opt.add_option('--target',
                   help=SUPPRESS_HELP,
                   action='store', dest='AUTOCONF_TARGET', default='')
    opt.add_option('--program-prefix',
                   help=SUPPRESS_HELP,
                   action='store', dest='AUTOCONF_PROGRAM_PREFIX', default='')
    opt.add_option('--disable-dependency-tracking',
                   help=SUPPRESS_HELP,
                   action='store_true', dest='AUTOCONF_DISABLE_DEPENDENCY_TRACKING', default=False)
    opt.add_option('--disable-silent-rules',
                   help=SUPPRESS_HELP,
                   action='store_true', dest='AUTOCONF_DISABLE_SILENT_RULES', default=False)

    gr = opt.option_group('dist options')
    gr.add_option('--sign-release',
                  help='sign the release tarball created by waf dist',
                  action='store_true', dest='SIGN_RELEASE')
    gr.add_option('--tag',
                  help='tag release in git at the same time',
                  type='string', action='store', dest='TAG_RELEASE')

    opt.add_option('--disable-python',
                   help='do not generate python modules',
                   action='store_true', dest='disable_python', default=False)
+
+
@Utils.run_once
def configure(conf):
    '''base configure checks shared by all wafsamba-using projects

    Loads the compiler and wafsamba extensions, copies parsed options
    into conf.env, and probes the toolchain/platform (warning flags,
    rpath, symbol versioning, visibility, constructor/destructor
    attributes, endianness, headers, largefile support, fuzzing flags).
    Runs only once per configure (Utils.run_once).
    '''
    conf.env.hlist = []
    conf.env.srcdir = conf.srcnode.abspath()

    conf.define('SRCDIR', conf.env['srcdir'])

    conf.SETUP_CONFIGURE_CACHE(Options.options.enable_configure_cache)

    # load our local waf extensions
    conf.load('gnu_dirs')
    conf.load('wafsamba')

    conf.CHECK_CC_ENV()

    conf.load('compiler_c')

    conf.CHECK_STANDARD_LIBPATH()

    # we need git for 'waf dist'
    conf.find_program('git', var='GIT')

    # older gcc versions (< 4.4) does not work with gccdeps, so we have to see if the .d file is generated
    if Options.options.enable_gccdeps:
        # stale file removal - the configuration may pick up the old .pyc file
        p = os.path.join(conf.env.srcdir, 'buildtools/wafsamba/gccdeps.pyc')
        if os.path.exists(p):
            os.remove(p)
        conf.load('gccdeps')

    # make the install paths available in environment
    conf.env.LIBDIR = Options.options.LIBDIR or '${PREFIX}/lib'
    conf.env.BINDIR = Options.options.BINDIR or '${PREFIX}/bin'
    conf.env.SBINDIR = Options.options.SBINDIR or '${PREFIX}/sbin'
    conf.env.MODULESDIR = Options.options.MODULESDIR
    conf.env.PRIVATELIBDIR = Options.options.PRIVATELIBDIR
    conf.env.BUNDLED_LIBS = Options.options.BUNDLED_LIBS.split(',')
    conf.env.SYSTEM_LIBS = ()
    conf.env.PRIVATE_LIBS = Options.options.PRIVATE_LIBS.split(',')
    conf.env.BUILTIN_LIBRARIES = Options.options.BUILTIN_LIBRARIES.split(',')
    conf.env.NONSHARED_BINARIES = Options.options.NONSHARED_BINARIES.split(',')

    conf.env.PRIVATE_EXTENSION = Options.options.PRIVATE_EXTENSION
    conf.env.PRIVATE_EXTENSION_EXCEPTION = Options.options.PRIVATE_EXTENSION_EXCEPTION.split(',')
    conf.env.PRIVATE_VERSION = "%s_%s_%s" % (Context.g_module.APPNAME,
        Context.g_module.VERSION, conf.env.PRIVATE_EXTENSION)

    conf.env.CROSS_COMPILE = Options.options.CROSS_COMPILE
    conf.env.CROSS_EXECUTE = Options.options.CROSS_EXECUTE
    conf.env.CROSS_ANSWERS = Options.options.CROSS_ANSWERS
    conf.env.HOSTCC = Options.options.HOSTCC

    conf.env.AUTOCONF_BUILD = Options.options.AUTOCONF_BUILD
    conf.env.AUTOCONF_HOST = Options.options.AUTOCONF_HOST
    conf.env.AUTOCONF_PROGRAM_PREFIX = Options.options.AUTOCONF_PROGRAM_PREFIX

    conf.env.disable_python = Options.options.disable_python

    # autoconf compatibility: --build/--host are accepted but a real
    # cross build must go through --cross-compile
    if (conf.env.AUTOCONF_HOST and
        conf.env.AUTOCONF_BUILD and
        conf.env.AUTOCONF_BUILD != conf.env.AUTOCONF_HOST):
        Logs.error('ERROR: Mismatch between --build and --host. Please use --cross-compile instead')
        sys.exit(1)
    if conf.env.AUTOCONF_PROGRAM_PREFIX:
        Logs.error('ERROR: --program-prefix not supported')
        sys.exit(1)

    # enable ABI checking for developers
    conf.env.ABI_CHECK = Options.options.ABI_CHECK or Options.options.developer
    if Options.options.ABI_CHECK_DISABLE:
        conf.env.ABI_CHECK = False
    try:
        conf.find_program('gdb', mandatory=True)
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # catching the waf configuration error type would be safer here
        conf.env.ABI_CHECK = False

    conf.env.enable_coverage = Options.options.enable_coverage
    if conf.env.enable_coverage:
        conf.ADD_LDFLAGS('-lgcov', testflags=True)
        conf.ADD_CFLAGS('--coverage', testflags=True)
        # disable abi check for coverage, otherwise ld will fail
        conf.env.ABI_CHECK = False

    conf.env.GIT_LOCAL_CHANGES = Options.options.GIT_LOCAL_CHANGES

    conf.CHECK_UNAME()

    # see if we can compile and run a simple C program
    conf.CHECK_CODE('printf("hello world")',
                    define='HAVE_SIMPLE_C_PROG',
                    mandatory=True,
                    execute=not conf.env.CROSS_COMPILE,
                    headers='stdio.h',
                    msg='Checking simple C program')

    # Try to find the right extra flags for -Werror behaviour
    for f in ["-Werror",       # GCC
              "-errwarn=%all", # Sun Studio
              "-qhalt=w",      # IBM xlc
              "-w2",           # Tru64
             ]:
        if conf.CHECK_CFLAGS([f]):
            if not 'WERROR_CFLAGS' in conf.env:
                conf.env['WERROR_CFLAGS'] = []
            conf.env['WERROR_CFLAGS'].extend([f])
            break

    # check which compiler/linker flags are needed for rpath support
    if conf.CHECK_LDFLAGS(['-Wl,-rpath,.']):
        conf.env['RPATH_ST'] = '-Wl,-rpath,%s'
    elif conf.CHECK_LDFLAGS(['-Wl,-R,.']):
        conf.env['RPATH_ST'] = '-Wl,-R,%s'

    # check for rpath
    if conf.CHECK_LIBRARY_SUPPORT(rpath=True):
        support_rpath = True
        conf.env.RPATH_ON_BUILD = not Options.options.disable_rpath_build
        conf.env.RPATH_ON_INSTALL = (conf.env.RPATH_ON_BUILD and
                                     not Options.options.disable_rpath_install)
        if not conf.env.PRIVATELIBDIR:
            conf.env.PRIVATELIBDIR = '%s/%s' % (conf.env.LIBDIR, Context.g_module.APPNAME)
        conf.env.RPATH_ON_INSTALL_PRIVATE = (
            not Options.options.disable_rpath_private_install)
    else:
        support_rpath = False
        conf.env.RPATH_ON_INSTALL = False
        conf.env.RPATH_ON_BUILD = False
        conf.env.RPATH_ON_INSTALL_PRIVATE = False
        if not conf.env.PRIVATELIBDIR:
            # rpath is not possible so there is no sense in having a
            # private library directory by default.
            # the user can of course always override it.
            conf.env.PRIVATELIBDIR = conf.env.LIBDIR

    if (not Options.options.disable_symbol_versions and
        conf.CHECK_LIBRARY_SUPPORT(rpath=support_rpath,
                                   version_script=True,
                                   msg='-Wl,--version-script support')):
        conf.env.HAVE_LD_VERSION_SCRIPT = True
    else:
        conf.env.HAVE_LD_VERSION_SCRIPT = False

    if conf.CHECK_CFLAGS(['-fvisibility=hidden']):
        conf.env.VISIBILITY_CFLAGS = '-fvisibility=hidden'
        conf.CHECK_CODE('''int main(void) { return 0; }
                        __attribute__((visibility("default"))) void vis_foo2(void) {}\n''',
                        cflags=conf.env.VISIBILITY_CFLAGS,
                        strict=True,
                        define='HAVE_VISIBILITY_ATTR', addmain=False)

    # check HAVE_CONSTRUCTOR_ATTRIBUTE
    conf.CHECK_CODE('''
            void test_constructor_attribute(void) __attribute__ ((constructor));

            void test_constructor_attribute(void)
            {
                return;
            }

            int main(void) {
                return 0;
            }
            ''',
            'HAVE_CONSTRUCTOR_ATTRIBUTE',
            addmain=False,
            strict=True,
            msg='Checking for library constructor support')

    # check HAVE_PRAGMA_INIT alternatively
    if not conf.env.HAVE_CONSTRUCTOR_ATTRIBUTE:
        conf.CHECK_CODE('''
                #pragma init (test_init)

                void test_init(void)
                {
                    return;
                }

                int main(void) {
                    return 0;
                }
                ''',
                'HAVE_PRAGMA_INIT',
                addmain=False,
                strict=True,
                msg='Checking for pragma init support')

    # check HAVE_DESTRUCTOR_ATTRIBUTE
    conf.CHECK_CODE('''
            void test_destructor_attribute(void) __attribute__ ((destructor));

            void test_destructor_attribute(void)
            {
                return;
            }

            int main(void) {
                return 0;
            }
            ''',
            'HAVE_DESTRUCTOR_ATTRIBUTE',
            addmain=False,
            strict=True,
            msg='Checking for library destructor support')

    # check HAVE_PRAGMA_FINI alternatively
    if not conf.env.HAVE_DESTRUCTOR_ATTRIBUTE:
        conf.CHECK_CODE('''
                #pragma fini (test_fini)

                void test_fini(void)
                {
                    return;
                }

                int main(void) {
                    return 0;
                }
                ''',
                'HAVE_PRAGMA_FINI',
                addmain=False,
                strict=True,
                msg='Checking for pragma fini support')

    conf.CHECK_CODE('''
            void test_attribute(void) __attribute__ (());

            void test_attribute(void)
            {
                return;
            }

            int main(void) {
                return 0;
            }
            ''',
            'HAVE___ATTRIBUTE__',
            addmain=False,
            strict=True,
            msg='Checking for __attribute__')

    # Solaris by default uses draft versions of some functions unless you set
    # _POSIX_PTHREAD_SEMANTICS
    if sys.platform.startswith('sunos'):
        conf.DEFINE('_POSIX_PTHREAD_SEMANTICS', 1)

    if sys.platform.startswith('aix'):
        conf.DEFINE('_ALL_SOURCE', 1, add_to_cflags=True)
        # Might not be needed if ALL_SOURCE is defined
        # conf.DEFINE('_XOPEN_SOURCE', 600, add_to_cflags=True)

    # we should use the PIC options in waf instead
    # Some compiler didn't support -fPIC but just print a warning
    if conf.env['COMPILER_CC'] == "suncc":
        conf.ADD_CFLAGS('-KPIC', testflags=True)
        # we really want define here as we need to have this
        # define even during the tests otherwise detection of
        # boolean is broken
        conf.DEFINE('_STDC_C99', 1, add_to_cflags=True)
        conf.DEFINE('_XPG6', 1, add_to_cflags=True)
    else:
        conf.ADD_CFLAGS('-fPIC', testflags=True)

    # On Solaris 8 with suncc (at least) the flags for the linker to define the name of the
    # library are not always working (if the command line is very very long and with a lot
    # of files)

    if conf.env['COMPILER_CC'] == "suncc":
        save = conf.env['SONAME_ST']
        conf.env['SONAME_ST'] = '-Wl,-h,%s'
        if not conf.CHECK_SHLIB_INTRASINC_NAME_FLAGS("Checking if flags %s are ok" % conf.env['SONAME_ST']):
            conf.env['SONAME_ST'] = save

    conf.CHECK_INLINE()

    # check for pkgconfig
    conf.CHECK_CFG(atleast_pkgconfig_version='0.0.0')

    conf.DEFINE('_GNU_SOURCE', 1, add_to_cflags=True)
    conf.DEFINE('_XOPEN_SOURCE_EXTENDED', 1, add_to_cflags=True)

    #
    # Needs to be defined before std*.h and string*.h are included
    # As Python.h already brings string.h we need it in CFLAGS.
    # See memset_s() details here:
    # https://en.cppreference.com/w/c/string/byte/memset
    #
    if conf.CHECK_CFLAGS(['-D__STDC_WANT_LIB_EXT1__=1']):
        conf.ADD_CFLAGS('-D__STDC_WANT_LIB_EXT1__=1')

    # on Tru64 certain features are only available with _OSF_SOURCE set to 1
    # and _XOPEN_SOURCE set to 600
    if conf.env['SYSTEM_UNAME_SYSNAME'] == 'OSF1':
        conf.DEFINE('_OSF_SOURCE', 1, add_to_cflags=True)
        conf.DEFINE('_XOPEN_SOURCE', 600, add_to_cflags=True)

    # SCM_RIGHTS is only avail if _XOPEN_SOURCE is defined on IRIX
    if conf.env['SYSTEM_UNAME_SYSNAME'] == 'IRIX':
        conf.DEFINE('_XOPEN_SOURCE', 600, add_to_cflags=True)
        conf.DEFINE('_BSD_TYPES', 1, add_to_cflags=True)

    # Try to find the right extra flags for C99 initialisers
    for f in ["", "-AC99", "-qlanglvl=extc99", "-qlanglvl=stdc99", "-c99"]:
        if conf.CHECK_CFLAGS([f], '''
struct foo {int x;char y;};
struct foo bar = { .y = 'X', .x = 1 };
'''):
            if f != "":
                conf.ADD_CFLAGS(f)
            break

    # get the base headers we'll use for the rest of the tests
    conf.CHECK_HEADERS('stdio.h sys/types.h sys/stat.h stdlib.h stddef.h memory.h string.h',
                       add_headers=True)
    conf.CHECK_HEADERS('strings.h inttypes.h stdint.h unistd.h minix/config.h', add_headers=True)
    conf.CHECK_HEADERS('ctype.h', add_headers=True)

    if sys.platform == 'darwin':
        conf.DEFINE('_DARWIN_C_SOURCE', 1, add_to_cflags=True)
        conf.DEFINE('_DARWIN_UNLIMITED_GETGROUPS', 1, add_to_cflags=True)
    else:
        conf.CHECK_HEADERS('standards.h', add_headers=True)

    conf.CHECK_HEADERS('stdbool.h stdint.h stdarg.h vararg.h', add_headers=True)
    conf.CHECK_HEADERS('limits.h assert.h')

    # see if we need special largefile flags
    if not conf.CHECK_LARGEFILE():
        raise Errors.WafError('Samba requires large file support, but not available on this platform: sizeof(off_t) < 8')

    if conf.env.HAVE_STDDEF_H and conf.env.HAVE_STDLIB_H:
        conf.DEFINE('STDC_HEADERS', 1)

    conf.CHECK_HEADERS('sys/time.h time.h', together=True)

    if conf.env.HAVE_SYS_TIME_H and conf.env.HAVE_TIME_H:
        conf.DEFINE('TIME_WITH_SYS_TIME', 1)

    # cope with different extensions for libraries
    (root, ext) = os.path.splitext(conf.env.cshlib_PATTERN)
    if ext[0] == '.':
        conf.define('SHLIBEXT', ext[1:], quote=True)
    else:
        conf.define('SHLIBEXT', "so", quote=True)

    # First try a header check for cross-compile friendliness
    conf.CHECK_CODE(code = """#ifdef __BYTE_ORDER
                        #define B __BYTE_ORDER
                        #elif defined(BYTE_ORDER)
                        #define B BYTE_ORDER
                        #endif

                        #ifdef __LITTLE_ENDIAN
                        #define LITTLE __LITTLE_ENDIAN
                        #elif defined(LITTLE_ENDIAN)
                        #define LITTLE LITTLE_ENDIAN
                        #endif

                        #if !defined(LITTLE) || !defined(B) || LITTLE != B
                        #error Not little endian.
                        #endif
                        int main(void) { return 0; }\n""",
                    addmain=False,
                    headers="endian.h sys/endian.h",
                    define="HAVE_LITTLE_ENDIAN")
    conf.CHECK_CODE(code = """#ifdef __BYTE_ORDER
                        #define B __BYTE_ORDER
                        #elif defined(BYTE_ORDER)
                        #define B BYTE_ORDER
                        #endif

                        #ifdef __BIG_ENDIAN
                        #define BIG __BIG_ENDIAN
                        #elif defined(BIG_ENDIAN)
                        #define BIG BIG_ENDIAN
                        #endif

                        #if !defined(BIG) || !defined(B) || BIG != B
                        #error Not big endian.
                        #endif
                        int main(void) { return 0; }\n""",
                    addmain=False,
                    headers="endian.h sys/endian.h",
                    define="HAVE_BIG_ENDIAN")

    if not conf.CONFIG_SET("HAVE_BIG_ENDIAN") and not conf.CONFIG_SET("HAVE_LITTLE_ENDIAN"):
        # That didn't work! Do runtime test.
        conf.CHECK_CODE("""union { int i; char c[sizeof(int)]; } u;
          u.i = 0x01020304;
          return u.c[0] == 0x04 && u.c[1] == 0x03 && u.c[2] == 0x02 && u.c[3] == 0x01 ? 0 : 1;""",
                        addmain=True, execute=True,
                        define='HAVE_LITTLE_ENDIAN',
                        msg="Checking for HAVE_LITTLE_ENDIAN - runtime")
        conf.CHECK_CODE("""union { int i; char c[sizeof(int)]; } u;
          u.i = 0x01020304;
          return u.c[0] == 0x01 && u.c[1] == 0x02 && u.c[2] == 0x03 && u.c[3] == 0x04 ? 0 : 1;""",
                        addmain=True, execute=True,
                        define='HAVE_BIG_ENDIAN',
                        msg="Checking for HAVE_BIG_ENDIAN - runtime")

    # Extra sanity check.
    if conf.CONFIG_SET("HAVE_BIG_ENDIAN") == conf.CONFIG_SET("HAVE_LITTLE_ENDIAN"):
        Logs.error("Failed endian determination. The PDP-11 is back?")
        sys.exit(1)
    else:
        if conf.CONFIG_SET("HAVE_BIG_ENDIAN"):
            conf.DEFINE('WORDS_BIGENDIAN', 1)

    # check if signal() takes a void function
    if conf.CHECK_CODE('return *(signal (0, 0)) (0) == 1',
                       define='RETSIGTYPE_INT',
                       execute=False,
                       headers='signal.h',
                       msg='Checking if signal handlers return int'):
        conf.DEFINE('RETSIGTYPE', 'int')
    else:
        conf.DEFINE('RETSIGTYPE', 'void')

    conf.CHECK_VARIABLE('__FUNCTION__', define='HAVE_FUNCTION_MACRO')

    conf.CHECK_CODE('va_list ap1,ap2; va_copy(ap1,ap2)',
                    define="HAVE_VA_COPY",
                    msg="Checking for va_copy")

    conf.env.enable_fuzzing = False

    conf.env.enable_libfuzzer = Options.options.enable_libfuzzer
    conf.env.enable_afl_fuzzer = Options.options.enable_afl_fuzzer
    if conf.env.enable_libfuzzer or conf.env.enable_afl_fuzzer:
        conf.env.enable_fuzzing = True
        conf.DEFINE('FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION', 1)
        conf.env.FUZZ_TARGET_LDFLAGS = Options.options.FUZZ_TARGET_LDFLAGS

    conf.SAMBA_BUILD_ENV()
+
+
def build(bld):
    '''top-level build rules shared by wafsamba projects

    Refuses to build if the current build path is not inside the source
    tree (i.e. the source directory was moved since configure), then
    sets up the wafsamba build groups and project-rule checks.
    '''
    # give a more useful message if the source directory has moved
    cur = bld.path.abspath()
    top = bld.srcnode.abspath()
    if '../' in os.path.relpath(cur, top):
        Logs.error('bld.path %s is not a child of %s' % (cur, top))
        raise Errors.WafError('''The top source directory has moved. Please run distclean and reconfigure''')

    bld.SETUP_BUILD_GROUPS()
    bld.ENFORCE_GROUP_ORDERING()
    bld.CHECK_PROJECT_RULES()