Merge ~ahasenack/ubuntu/+source/tdb:disco-tdb-1.3.18 into ubuntu/+source/tdb:ubuntu/devel

Proposed by Andreas Hasenack
Status: Merged
Approved by: Andreas Hasenack
Approved revision: cbfc32075322ceab4847ec1d185429090232c13b
Merged at revision: cbfc32075322ceab4847ec1d185429090232c13b
Proposed branch: ~ahasenack/ubuntu/+source/tdb:disco-tdb-1.3.18
Merge into: ubuntu/+source/tdb:ubuntu/devel
Diff against target: 67291 lines (+42355/-1419)
258 files modified
ABI/tdb-1.3.17.sigs (+73/-0)
ABI/tdb-1.3.18.sigs (+73/-0)
Makefile (+3/-1)
_tdb_text.py (+0/-1)
buildtools/bin/waf (+111/-21)
buildtools/examples/run_on_target.py (+1/-1)
buildtools/scripts/abi_gen.sh (+1/-1)
buildtools/wafsamba/configure_file.py (+4/-2)
buildtools/wafsamba/generic_cc.py (+51/-52)
buildtools/wafsamba/pkgconfig.py (+2/-2)
buildtools/wafsamba/samba3.py (+4/-5)
buildtools/wafsamba/samba_abi.py (+22/-14)
buildtools/wafsamba/samba_autoconf.py (+99/-73)
buildtools/wafsamba/samba_autoproto.py (+3/-3)
buildtools/wafsamba/samba_bundled.py (+11/-11)
buildtools/wafsamba/samba_conftests.py (+34/-31)
buildtools/wafsamba/samba_cross.py (+14/-13)
buildtools/wafsamba/samba_deps.py (+23/-14)
buildtools/wafsamba/samba_dist.py (+42/-9)
buildtools/wafsamba/samba_git.py (+1/-1)
buildtools/wafsamba/samba_headers.py (+7/-6)
buildtools/wafsamba/samba_install.py (+6/-5)
buildtools/wafsamba/samba_patterns.py (+10/-3)
buildtools/wafsamba/samba_perl.py (+8/-5)
buildtools/wafsamba/samba_pidl.py (+18/-12)
buildtools/wafsamba/samba_python.py (+31/-21)
buildtools/wafsamba/samba_third_party.py (+7/-7)
buildtools/wafsamba/samba_utils.py (+178/-111)
buildtools/wafsamba/samba_version.py (+8/-5)
buildtools/wafsamba/samba_waf18.py (+429/-0)
buildtools/wafsamba/samba_wildcard.py (+8/-9)
buildtools/wafsamba/stale_files.py (+4/-2)
buildtools/wafsamba/symbols.py (+19/-17)
buildtools/wafsamba/test_duplicate_symbol.sh (+1/-1)
buildtools/wafsamba/tests/test_abi.py (+15/-1)
buildtools/wafsamba/wafsamba.py (+48/-118)
buildtools/wafsamba/wscript (+47/-58)
common/dump.c (+10/-1)
common/freelist.c (+34/-59)
common/io.c (+2/-2)
common/lock.c (+2/-2)
common/open.c (+48/-55)
common/summary.c (+8/-0)
common/tdb.c (+170/-105)
common/tdb_private.h (+11/-2)
common/traverse.c (+110/-6)
configure (+1/-1)
debian/changelog (+15/-0)
debian/libtdb-dev.install (+1/-0)
debian/libtdb1.symbols (+4/-0)
debian/patches/40_test_transaction_expand_non_fatal.diff (+4/-4)
debian/rules (+2/-7)
dev/null (+0/-129)
include/tdb.h (+68/-1)
lib/replace/Makefile (+2/-1)
lib/replace/README (+1/-0)
lib/replace/configure (+1/-1)
lib/replace/getifaddrs.c (+1/-1)
lib/replace/replace.c (+109/-0)
lib/replace/replace.h (+52/-27)
lib/replace/snprintf.c (+72/-72)
lib/replace/system/capability.h (+2/-0)
lib/replace/system/dir.h (+2/-2)
lib/replace/system/filesys.h (+4/-12)
lib/replace/system/gssapi.h (+6/-6)
lib/replace/system/kerberos.h (+2/-2)
lib/replace/system/readline.h (+1/-1)
lib/replace/system/threads.h (+27/-0)
lib/replace/wscript (+71/-18)
pytdb.c (+34/-18)
python/tdbdump.py (+1/-1)
python/tests/simple.py (+1/-1)
test/run-circular-chain.c (+42/-0)
test/run-circular-freelist.c (+50/-0)
test/run-marklock-deadlock.c (+1/-1)
test/run-mutex-openflags2.c (+0/-7)
test/run-traverse-chain.c (+94/-0)
test/test_tdbbackup.sh (+54/-0)
third_party/waf/waflib/Build.py (+1474/-0)
third_party/waf/waflib/ConfigSet.py (+361/-0)
third_party/waf/waflib/Configure.py (+638/-0)
third_party/waf/waflib/Context.py (+737/-0)
third_party/waf/waflib/Errors.py (+68/-0)
third_party/waf/waflib/Logs.py (+379/-0)
third_party/waf/waflib/Node.py (+970/-0)
third_party/waf/waflib/Options.py (+342/-0)
third_party/waf/waflib/Runner.py (+586/-0)
third_party/waf/waflib/Scripting.py (+613/-0)
third_party/waf/waflib/Task.py (+1281/-0)
third_party/waf/waflib/TaskGen.py (+917/-0)
third_party/waf/waflib/Tools/__init__.py (+1/-1)
third_party/waf/waflib/Tools/ar.py (+24/-0)
third_party/waf/waflib/Tools/asm.py (+73/-0)
third_party/waf/waflib/Tools/bison.py (+49/-0)
third_party/waf/waflib/Tools/c.py (+39/-0)
third_party/waf/waflib/Tools/c_aliases.py (+144/-0)
third_party/waf/waflib/Tools/c_config.py (+1352/-0)
third_party/waf/waflib/Tools/c_osx.py (+193/-0)
third_party/waf/waflib/Tools/c_preproc.py (+1091/-0)
third_party/waf/waflib/Tools/c_tests.py (+229/-0)
third_party/waf/waflib/Tools/ccroot.py (+775/-0)
third_party/waf/waflib/Tools/clang.py (+29/-0)
third_party/waf/waflib/Tools/clangxx.py (+30/-0)
third_party/waf/waflib/Tools/compiler_c.py (+110/-0)
third_party/waf/waflib/Tools/compiler_cxx.py (+111/-0)
third_party/waf/waflib/Tools/compiler_d.py (+85/-0)
third_party/waf/waflib/Tools/compiler_fc.py (+73/-0)
third_party/waf/waflib/Tools/cs.py (+211/-0)
third_party/waf/waflib/Tools/cxx.py (+40/-0)
third_party/waf/waflib/Tools/d.py (+97/-0)
third_party/waf/waflib/Tools/d_config.py (+64/-0)
third_party/waf/waflib/Tools/d_scan.py (+211/-0)
third_party/waf/waflib/Tools/dbus.py (+70/-0)
third_party/waf/waflib/Tools/dmd.py (+80/-0)
third_party/waf/waflib/Tools/errcheck.py (+237/-0)
third_party/waf/waflib/Tools/fc.py (+187/-0)
third_party/waf/waflib/Tools/fc_config.py (+488/-0)
third_party/waf/waflib/Tools/fc_scan.py (+114/-0)
third_party/waf/waflib/Tools/flex.py (+62/-0)
third_party/waf/waflib/Tools/g95.py (+66/-0)
third_party/waf/waflib/Tools/gas.py (+18/-0)
third_party/waf/waflib/Tools/gcc.py (+156/-0)
third_party/waf/waflib/Tools/gdc.py (+55/-0)
third_party/waf/waflib/Tools/gfortran.py (+93/-0)
third_party/waf/waflib/Tools/glib2.py (+489/-0)
third_party/waf/waflib/Tools/gnu_dirs.py (+131/-0)
third_party/waf/waflib/Tools/gxx.py (+157/-0)
third_party/waf/waflib/Tools/icc.py (+30/-0)
third_party/waf/waflib/Tools/icpc.py (+30/-0)
third_party/waf/waflib/Tools/ifort.py (+413/-0)
third_party/waf/waflib/Tools/intltool.py (+231/-0)
third_party/waf/waflib/Tools/irixcc.py (+66/-0)
third_party/waf/waflib/Tools/javaw.py (+464/-0)
third_party/waf/waflib/Tools/ldc2.py (+56/-0)
third_party/waf/waflib/Tools/lua.py (+38/-0)
third_party/waf/waflib/Tools/md5_tstamp.py (+39/-0)
third_party/waf/waflib/Tools/msvc.py (+1020/-0)
third_party/waf/waflib/Tools/nasm.py (+26/-0)
third_party/waf/waflib/Tools/nobuild.py (+24/-0)
third_party/waf/waflib/Tools/perl.py (+156/-0)
third_party/waf/waflib/Tools/python.py (+627/-0)
third_party/waf/waflib/Tools/qt5.py (+796/-0)
third_party/waf/waflib/Tools/ruby.py (+186/-0)
third_party/waf/waflib/Tools/suncc.py (+67/-0)
third_party/waf/waflib/Tools/suncxx.py (+67/-0)
third_party/waf/waflib/Tools/tex.py (+543/-0)
third_party/waf/waflib/Tools/vala.py (+355/-0)
third_party/waf/waflib/Tools/waf_unit_test.py (+296/-0)
third_party/waf/waflib/Tools/winres.py (+78/-0)
third_party/waf/waflib/Tools/xlc.py (+65/-0)
third_party/waf/waflib/Tools/xlcxx.py (+65/-0)
third_party/waf/waflib/Utils.py (+1021/-0)
third_party/waf/waflib/__init__.py (+1/-1)
third_party/waf/waflib/ansiterm.py (+342/-0)
third_party/waf/waflib/extras/__init__.py (+3/-0)
third_party/waf/waflib/extras/batched_cc.py (+173/-0)
third_party/waf/waflib/extras/biber.py (+58/-0)
third_party/waf/waflib/extras/bjam.py (+128/-0)
third_party/waf/waflib/extras/blender.py (+108/-0)
third_party/waf/waflib/extras/boo.py (+81/-0)
third_party/waf/waflib/extras/boost.py (+525/-0)
third_party/waf/waflib/extras/build_file_tracker.py (+28/-0)
third_party/waf/waflib/extras/build_logs.py (+110/-0)
third_party/waf/waflib/extras/buildcopy.py (+82/-0)
third_party/waf/waflib/extras/c_bgxlc.py (+32/-0)
third_party/waf/waflib/extras/c_dumbpreproc.py (+72/-0)
third_party/waf/waflib/extras/c_emscripten.py (+87/-0)
third_party/waf/waflib/extras/c_nec.py (+74/-0)
third_party/waf/waflib/extras/cabal.py (+152/-0)
third_party/waf/waflib/extras/cfg_altoptions.py (+110/-0)
third_party/waf/waflib/extras/clang_compilation_database.py (+85/-0)
third_party/waf/waflib/extras/codelite.py (+875/-0)
third_party/waf/waflib/extras/color_gcc.py (+39/-0)
third_party/waf/waflib/extras/color_rvct.py (+51/-0)
third_party/waf/waflib/extras/compat15.py (+406/-0)
third_party/waf/waflib/extras/cppcheck.py (+585/-0)
third_party/waf/waflib/extras/cpplint.py (+222/-0)
third_party/waf/waflib/extras/cross_gnu.py (+227/-0)
third_party/waf/waflib/extras/cython.py (+146/-0)
third_party/waf/waflib/extras/dcc.py (+72/-0)
third_party/waf/waflib/extras/distnet.py (+430/-0)
third_party/waf/waflib/extras/doxygen.py (+227/-0)
third_party/waf/waflib/extras/dpapi.py (+87/-0)
third_party/waf/waflib/extras/eclipse.py (+431/-0)
third_party/waf/waflib/extras/erlang.py (+110/-0)
third_party/waf/waflib/extras/fast_partial.py (+518/-0)
third_party/waf/waflib/extras/fc_bgxlf.py (+32/-0)
third_party/waf/waflib/extras/fc_cray.py (+51/-0)
third_party/waf/waflib/extras/fc_nag.py (+61/-0)
third_party/waf/waflib/extras/fc_nec.py (+60/-0)
third_party/waf/waflib/extras/fc_open64.py (+58/-0)
third_party/waf/waflib/extras/fc_pgfortran.py (+68/-0)
third_party/waf/waflib/extras/fc_solstudio.py (+62/-0)
third_party/waf/waflib/extras/fc_xlf.py (+63/-0)
third_party/waf/waflib/extras/file_to_object.py (+137/-0)
third_party/waf/waflib/extras/fluid.py (+13/-9)
third_party/waf/waflib/extras/freeimage.py (+74/-0)
third_party/waf/waflib/extras/fsb.py (+31/-0)
third_party/waf/waflib/extras/fsc.py (+64/-0)
third_party/waf/waflib/extras/gccdeps.py (+214/-0)
third_party/waf/waflib/extras/gdbus.py (+87/-0)
third_party/waf/waflib/extras/gob2.py (+5/-5)
third_party/waf/waflib/extras/halide.py (+151/-0)
third_party/waf/waflib/extras/javatest.py (+118/-0)
third_party/waf/waflib/extras/kde4.py (+93/-0)
third_party/waf/waflib/extras/local_rpath.py (+19/-0)
third_party/waf/waflib/extras/make.py (+142/-0)
third_party/waf/waflib/extras/midl.py (+69/-0)
third_party/waf/waflib/extras/msvcdeps.py (+256/-0)
third_party/waf/waflib/extras/msvs.py (+1048/-0)
third_party/waf/waflib/extras/netcache_client.py (+390/-0)
third_party/waf/waflib/extras/objcopy.py (+50/-0)
third_party/waf/waflib/extras/ocaml.py (+142/-91)
third_party/waf/waflib/extras/package.py (+76/-0)
third_party/waf/waflib/extras/parallel_debug.py (+459/-0)
third_party/waf/waflib/extras/pch.py (+148/-0)
third_party/waf/waflib/extras/pep8.py (+106/-0)
third_party/waf/waflib/extras/pgicc.py (+75/-0)
third_party/waf/waflib/extras/pgicxx.py (+20/-0)
third_party/waf/waflib/extras/proc.py (+54/-0)
third_party/waf/waflib/extras/protoc.py (+243/-0)
third_party/waf/waflib/extras/pyqt5.py (+241/-0)
third_party/waf/waflib/extras/pytest.py (+225/-0)
third_party/waf/waflib/extras/qnxnto.py (+72/-0)
third_party/waf/waflib/extras/qt4.py (+695/-0)
third_party/waf/waflib/extras/relocation.py (+85/-0)
third_party/waf/waflib/extras/remote.py (+327/-0)
third_party/waf/waflib/extras/resx.py (+35/-0)
third_party/waf/waflib/extras/review.py (+325/-0)
third_party/waf/waflib/extras/rst.py (+260/-0)
third_party/waf/waflib/extras/run_do_script.py (+139/-0)
third_party/waf/waflib/extras/run_m_script.py (+88/-0)
third_party/waf/waflib/extras/run_py_script.py (+104/-0)
third_party/waf/waflib/extras/run_r_script.py (+86/-0)
third_party/waf/waflib/extras/sas.py (+71/-0)
third_party/waf/waflib/extras/satellite_assembly.py (+57/-0)
third_party/waf/waflib/extras/scala.py (+128/-0)
third_party/waf/waflib/extras/slow_qt4.py (+96/-0)
third_party/waf/waflib/extras/softlink_libs.py (+76/-0)
third_party/waf/waflib/extras/stale.py (+98/-0)
third_party/waf/waflib/extras/stracedeps.py (+174/-0)
third_party/waf/waflib/extras/swig.py (+237/-0)
third_party/waf/waflib/extras/syms.py (+84/-0)
third_party/waf/waflib/extras/ticgt.py (+300/-0)
third_party/waf/waflib/extras/unity.py (+108/-0)
third_party/waf/waflib/extras/use_config.py (+185/-0)
third_party/waf/waflib/extras/valadoc.py (+127/-99)
third_party/waf/waflib/extras/waf_xattr.py (+150/-0)
third_party/waf/waflib/extras/why.py (+78/-0)
third_party/waf/waflib/extras/win32_opts.py (+170/-0)
third_party/waf/waflib/extras/wix.py (+87/-0)
third_party/waf/waflib/extras/xcode6.py (+727/-0)
third_party/waf/waflib/fixpy2.py (+64/-0)
third_party/waf/waflib/processor.py (+64/-0)
tools/tdbbackup.c (+28/-7)
tools/tdbdump.c (+3/-1)
tools/tdbtorture.c (+17/-6)
wscript (+33/-18)
Reviewer              Review Type   Date Requested   Status
Christian Ehrhardt    community                      Approve
Canonical Server                                     Pending
Review via email: mp+364086@code.launchpad.net

Description of the change

PPA with builds: ppa:ahasenack/samba-4.10
https://launchpad.net/~ahasenack/+archive/ubuntu/samba-4.10/

Bileto ticket:
https://bileto.ubuntu.com/#/ticket/3672

It was green, but recent uploads removed the dep8 test history. I have to copy the packages over again.
This is part of the effort to remove python2 from the desktop iso.

That requires samba to be updated to 4.10 (bug #1818518), and that update requires a new tdb, which is what this branch is about.

We are going ahead of Debian.

Upstream switched from python 2 to python 3 by default, which required a few changes to the ./configure call in d/rules and some more cleanup in dh_clean.
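Roughly the kind of change this means in d/rules; this is only an illustration of pointing waf's configure at python3, not the literal diff, and the real override passes more options than shown here:

override_dh_auto_configure:
        PYTHON=python3 ./configure --prefix=/usr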

I don't know why debian is skipping generating a symbols file for the python{,3}-tdb packages (the -N option):
override_dh_makeshlibs:
        dh_makeshlibs -Npython-tdb -Npython3-tdb -- -c4

I also don't know why debian is manually building a static library in override_dh_auto_install; it was not being listed in the corresponding d/libtdb-dev.install file, which I "fixed". I can bring this up with debian if you want.
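The "fix" is just listing the static library in the install file, i.e. an entry along these lines (the exact path is what matters and is in the diff, this one is from memory):

        usr/lib/*/libtdb.a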

I didn't clean up the commit tree, because there has been some back and forth here about whether or not to drop the py2 packages entirely. Foundations wanted the python-* (py2) packages dropped entirely, but at least python-tdb is still needed by bzr-git (Recommends). I'll keep building it, but we might be asked to drop it and somehow fix bzr-git. Anyway, I think that can be done later if needed.

Revision history for this message
Andreas Hasenack (ahasenack) wrote :

From IRC:
<cjwatson> bzr-git's tdb thing is used by LP
<cjwatson> for git-to-bzr code imports
<cjwatson> this doesn't mean you should block on it, since we maintain our own copies of the modules we need, just FYI on what sorts of things it's used for

Revision history for this message
Christian Ehrhardt  (paelzer) wrote :

probably bikeshedding, but all the changes in the changelog are "due to" the New upstream version.
I'd therefore have expected to find them indented as

   * New upstream version: 1.3.18 (LP: #1818520)
     - d/p/40_test_transaction_expand_non_fatal.diff: refresh to get rid of
       fuzz
     - ...

up to you

Revision history for this message
Andreas Hasenack (ahasenack) wrote :

I can improve that

Revision history for this message
Christian Ehrhardt  (paelzer) wrote :

waf cleanup is fine, but given how much you list, don't you want to convert it to just
  $ find ... -name .pyc -delete
or something like that?
Considering that you also clear directories, maybe:
 $ find -name ".pyc" -o -name "__pycache__" -exec rm {} \;
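Something like this should cover both in one pass, though untested; the grouping matters with -o, and the directories need a recursive remove rather than plain rm:
 $ find . \( -name "*.pyc" -o -name "__pycache__" \) -prune -exec rm -rf {} +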

Revision history for this message
Christian Ehrhardt  (paelzer) wrote :

Other than the nitpicks mentioned, the commits LGTM; I'll go check the build log now ...

Revision history for this message
Christian Ehrhardt  (paelzer) wrote :

I wondered if [1] would cause trouble for the py2 packages, but after reviewing what ends up in those packages I think you are good.

[1]: https://gitlab.com/samba-team/samba/commit/a07279b78a83d7e62eb0c58a2ac989407a40fb6b

Revision history for this message
Christian Ehrhardt  (paelzer) wrote :

I'm done going through the upstream changes towards 1.3.18 and the build log; not much shows up there that needs work.

The one thing I wondered is (probably unimportant):
   dh_gencontrol
dpkg-gencontrol: warning: package python3-tdb: substitution variable ${python3:Versions} unused, but is defined
dpkg-gencontrol: warning: package python3-tdb: substitution variable ${python3:Versions} unused, but is defined
dpkg-gencontrol: warning: package python-tdb: substitution variable ${python:Versions} unused, but is defined
dpkg-gencontrol: warning: package python-tdb: substitution variable ${python:Versions} unused, but is defined

The field [1] is optional after all.
An entry would look like this on the py3 packages (py2 isn't needed since py2.7 is the only one):
XS-Python-Version: ${python3:Versions}

OTOH it might make future transitions easier (or harder??)
See also man dh_python3 for that.

Since I have not seen you iterating over python3 versions in d/rules, it might be safer to explicitly define this to be the current py3.7.
I have seen builds crash in -devel at times when there were two py3 versions (e.g. 3.6 & 3.7) in -release, and as far as I understand the field, it might help with that.

And adding those lines should be easier than modifying more of d/rules to build potentially multiple python modules by iterating over `py3versions -vr` (rough sketch of that below).

[1]: https://www.debian.org/doc/packaging-manuals/python-policy/ch-module_packages.html
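For completeness, the iterating pattern in d/rules would look roughly like this; untested, and a real version would likely need a separate configure/build per interpreter rather than a plain $(MAKE):

PY3VERS = $(shell py3versions -vr)

override_dh_auto_build:
        set -e; for pyver in $(PY3VERS); do \
                PYTHON=python$$pyver $(MAKE); \
        done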

Revision history for this message
Christian Ehrhardt  (paelzer) wrote :

All that said, nothing I found is critical - the changes LGTM.
Do you want to change the MP state to ack now or only after a re-review once you are finished?
(I must admit I squashed the commits for reviewability)

review: Approve
Revision history for this message
Andreas Hasenack (ahasenack) wrote :

I'll look at the python issues you pointed out, and maybe use that for the other packages too. Let's leave the MP state as is for now.

Revision history for this message
Andreas Hasenack (ahasenack) wrote :

Even when using X-Python3-Version: ${python3:Versions} in d/control, I still get the warning, specifically in the binary packages. I'll remove it for now, pending a better understanding of how this works.

Revision history for this message
Andreas Hasenack (ahasenack) wrote :

- changelog updated
- I also squashed the revert commit I had

tdb will build py2 and py3 packages. py2 because of bzr-git, even though the FFe bug said it would have been ok to drop python-tdb (py2). We can still do that later.

I'll move on to the other MPs now, get all consistent.

Revision history for this message
Andreas Hasenack (ahasenack) wrote :

Tagged and uploaded:

$ git push pkg upload/1.3.18-0ubuntu1
Enumerating objects: 395, done.
Counting objects: 100% (395/395), done.
Delta compression using up to 4 threads
Compressing objects: 100% (208/208), done.
Writing objects: 100% (301/301), 438.24 KiB | 245.00 KiB/s, done.
Total 301 (delta 101), reused 277 (delta 86)
To ssh://git.launchpad.net/~usd-import-team/ubuntu/+source/tdb
 * [new tag] upload/1.3.18-0ubuntu1 -> upload/1.3.18-0ubuntu1

$ dput ubuntu ../tdb_1.3.18-0ubuntu1_source.changes
Checking signature on .changes
gpg: ../tdb_1.3.18-0ubuntu1_source.changes: Valid signature from AC983EB5BF6BCBA9
Checking signature on .dsc
gpg: ../tdb_1.3.18-0ubuntu1.dsc: Valid signature from AC983EB5BF6BCBA9
Uploading to ubuntu (via ftp to upload.ubuntu.com):
  Uploading tdb_1.3.18-0ubuntu1.dsc: done.
  Uploading tdb_1.3.18-0ubuntu1.debian.tar.xz: done.
  Uploading tdb_1.3.18-0ubuntu1_source.buildinfo: done.
  Uploading tdb_1.3.18-0ubuntu1_source.changes: done.
Successfully uploaded packages.

Revision history for this message
Andreas Hasenack (ahasenack) wrote :

Now uploading with the orig tarball included:

$ dput -f ubuntu ../tdb_1.3.18-0ubuntu1_source.changes
Checking signature on .changes
gpg: ../tdb_1.3.18-0ubuntu1_source.changes: Valid signature from AC983EB5BF6BCBA9
Checking signature on .dsc
gpg: ../tdb_1.3.18-0ubuntu1.dsc: Valid signature from AC983EB5BF6BCBA9
Uploading to ubuntu (via ftp to upload.ubuntu.com):
  Uploading tdb_1.3.18-0ubuntu1.dsc: done.
  Uploading tdb_1.3.18.orig.tar.gz: done.
  Uploading tdb_1.3.18-0ubuntu1.debian.tar.xz: done.
  Uploading tdb_1.3.18-0ubuntu1_source.buildinfo: done.
  Uploading tdb_1.3.18-0ubuntu1_source.changes: done.
Successfully uploaded packages.

Preview Diff

diff --git a/ABI/tdb-1.3.17.sigs b/ABI/tdb-1.3.17.sigs
new file mode 100644
index 0000000..e2b0427
--- /dev/null
+++ b/ABI/tdb-1.3.17.sigs
@@ -0,0 +1,73 @@
1tdb_add_flags: void (struct tdb_context *, unsigned int)
2tdb_append: int (struct tdb_context *, TDB_DATA, TDB_DATA)
3tdb_chainlock: int (struct tdb_context *, TDB_DATA)
4tdb_chainlock_mark: int (struct tdb_context *, TDB_DATA)
5tdb_chainlock_nonblock: int (struct tdb_context *, TDB_DATA)
6tdb_chainlock_read: int (struct tdb_context *, TDB_DATA)
7tdb_chainlock_read_nonblock: int (struct tdb_context *, TDB_DATA)
8tdb_chainlock_unmark: int (struct tdb_context *, TDB_DATA)
9tdb_chainunlock: int (struct tdb_context *, TDB_DATA)
10tdb_chainunlock_read: int (struct tdb_context *, TDB_DATA)
11tdb_check: int (struct tdb_context *, int (*)(TDB_DATA, TDB_DATA, void *), void *)
12tdb_close: int (struct tdb_context *)
13tdb_delete: int (struct tdb_context *, TDB_DATA)
14tdb_dump_all: void (struct tdb_context *)
15tdb_enable_seqnum: void (struct tdb_context *)
16tdb_error: enum TDB_ERROR (struct tdb_context *)
17tdb_errorstr: const char *(struct tdb_context *)
18tdb_exists: int (struct tdb_context *, TDB_DATA)
19tdb_fd: int (struct tdb_context *)
20tdb_fetch: TDB_DATA (struct tdb_context *, TDB_DATA)
21tdb_firstkey: TDB_DATA (struct tdb_context *)
22tdb_freelist_size: int (struct tdb_context *)
23tdb_get_flags: int (struct tdb_context *)
24tdb_get_logging_private: void *(struct tdb_context *)
25tdb_get_seqnum: int (struct tdb_context *)
26tdb_hash_size: int (struct tdb_context *)
27tdb_increment_seqnum_nonblock: void (struct tdb_context *)
28tdb_jenkins_hash: unsigned int (TDB_DATA *)
29tdb_lock_nonblock: int (struct tdb_context *, int, int)
30tdb_lockall: int (struct tdb_context *)
31tdb_lockall_mark: int (struct tdb_context *)
32tdb_lockall_nonblock: int (struct tdb_context *)
33tdb_lockall_read: int (struct tdb_context *)
34tdb_lockall_read_nonblock: int (struct tdb_context *)
35tdb_lockall_unmark: int (struct tdb_context *)
36tdb_log_fn: tdb_log_func (struct tdb_context *)
37tdb_map_size: size_t (struct tdb_context *)
38tdb_name: const char *(struct tdb_context *)
39tdb_nextkey: TDB_DATA (struct tdb_context *, TDB_DATA)
40tdb_null: dptr = 0xXXXX, dsize = 0
41tdb_open: struct tdb_context *(const char *, int, int, int, mode_t)
42tdb_open_ex: struct tdb_context *(const char *, int, int, int, mode_t, const struct tdb_logging_context *, tdb_hash_func)
43tdb_parse_record: int (struct tdb_context *, TDB_DATA, int (*)(TDB_DATA, TDB_DATA, void *), void *)
44tdb_printfreelist: int (struct tdb_context *)
45tdb_remove_flags: void (struct tdb_context *, unsigned int)
46tdb_reopen: int (struct tdb_context *)
47tdb_reopen_all: int (int)
48tdb_repack: int (struct tdb_context *)
49tdb_rescue: int (struct tdb_context *, void (*)(TDB_DATA, TDB_DATA, void *), void *)
50tdb_runtime_check_for_robust_mutexes: bool (void)
51tdb_set_logging_function: void (struct tdb_context *, const struct tdb_logging_context *)
52tdb_set_max_dead: void (struct tdb_context *, int)
53tdb_setalarm_sigptr: void (struct tdb_context *, volatile sig_atomic_t *)
54tdb_store: int (struct tdb_context *, TDB_DATA, TDB_DATA, int)
55tdb_storev: int (struct tdb_context *, TDB_DATA, const TDB_DATA *, int, int)
56tdb_summary: char *(struct tdb_context *)
57tdb_transaction_active: bool (struct tdb_context *)
58tdb_transaction_cancel: int (struct tdb_context *)
59tdb_transaction_commit: int (struct tdb_context *)
60tdb_transaction_prepare_commit: int (struct tdb_context *)
61tdb_transaction_start: int (struct tdb_context *)
62tdb_transaction_start_nonblock: int (struct tdb_context *)
63tdb_transaction_write_lock_mark: int (struct tdb_context *)
64tdb_transaction_write_lock_unmark: int (struct tdb_context *)
65tdb_traverse: int (struct tdb_context *, tdb_traverse_func, void *)
66tdb_traverse_chain: int (struct tdb_context *, unsigned int, tdb_traverse_func, void *)
67tdb_traverse_key_chain: int (struct tdb_context *, TDB_DATA, tdb_traverse_func, void *)
68tdb_traverse_read: int (struct tdb_context *, tdb_traverse_func, void *)
69tdb_unlock: int (struct tdb_context *, int, int)
70tdb_unlockall: int (struct tdb_context *)
71tdb_unlockall_read: int (struct tdb_context *)
72tdb_validate_freelist: int (struct tdb_context *, int *)
73tdb_wipe_all: int (struct tdb_context *)
diff --git a/ABI/tdb-1.3.18.sigs b/ABI/tdb-1.3.18.sigs
new file mode 100644
index 0000000..e2b0427
--- /dev/null
+++ b/ABI/tdb-1.3.18.sigs
@@ -0,0 +1,73 @@
1tdb_add_flags: void (struct tdb_context *, unsigned int)
2tdb_append: int (struct tdb_context *, TDB_DATA, TDB_DATA)
3tdb_chainlock: int (struct tdb_context *, TDB_DATA)
4tdb_chainlock_mark: int (struct tdb_context *, TDB_DATA)
5tdb_chainlock_nonblock: int (struct tdb_context *, TDB_DATA)
6tdb_chainlock_read: int (struct tdb_context *, TDB_DATA)
7tdb_chainlock_read_nonblock: int (struct tdb_context *, TDB_DATA)
8tdb_chainlock_unmark: int (struct tdb_context *, TDB_DATA)
9tdb_chainunlock: int (struct tdb_context *, TDB_DATA)
10tdb_chainunlock_read: int (struct tdb_context *, TDB_DATA)
11tdb_check: int (struct tdb_context *, int (*)(TDB_DATA, TDB_DATA, void *), void *)
12tdb_close: int (struct tdb_context *)
13tdb_delete: int (struct tdb_context *, TDB_DATA)
14tdb_dump_all: void (struct tdb_context *)
15tdb_enable_seqnum: void (struct tdb_context *)
16tdb_error: enum TDB_ERROR (struct tdb_context *)
17tdb_errorstr: const char *(struct tdb_context *)
18tdb_exists: int (struct tdb_context *, TDB_DATA)
19tdb_fd: int (struct tdb_context *)
20tdb_fetch: TDB_DATA (struct tdb_context *, TDB_DATA)
21tdb_firstkey: TDB_DATA (struct tdb_context *)
22tdb_freelist_size: int (struct tdb_context *)
23tdb_get_flags: int (struct tdb_context *)
24tdb_get_logging_private: void *(struct tdb_context *)
25tdb_get_seqnum: int (struct tdb_context *)
26tdb_hash_size: int (struct tdb_context *)
27tdb_increment_seqnum_nonblock: void (struct tdb_context *)
28tdb_jenkins_hash: unsigned int (TDB_DATA *)
29tdb_lock_nonblock: int (struct tdb_context *, int, int)
30tdb_lockall: int (struct tdb_context *)
31tdb_lockall_mark: int (struct tdb_context *)
32tdb_lockall_nonblock: int (struct tdb_context *)
33tdb_lockall_read: int (struct tdb_context *)
34tdb_lockall_read_nonblock: int (struct tdb_context *)
35tdb_lockall_unmark: int (struct tdb_context *)
36tdb_log_fn: tdb_log_func (struct tdb_context *)
37tdb_map_size: size_t (struct tdb_context *)
38tdb_name: const char *(struct tdb_context *)
39tdb_nextkey: TDB_DATA (struct tdb_context *, TDB_DATA)
40tdb_null: dptr = 0xXXXX, dsize = 0
41tdb_open: struct tdb_context *(const char *, int, int, int, mode_t)
42tdb_open_ex: struct tdb_context *(const char *, int, int, int, mode_t, const struct tdb_logging_context *, tdb_hash_func)
43tdb_parse_record: int (struct tdb_context *, TDB_DATA, int (*)(TDB_DATA, TDB_DATA, void *), void *)
44tdb_printfreelist: int (struct tdb_context *)
45tdb_remove_flags: void (struct tdb_context *, unsigned int)
46tdb_reopen: int (struct tdb_context *)
47tdb_reopen_all: int (int)
48tdb_repack: int (struct tdb_context *)
49tdb_rescue: int (struct tdb_context *, void (*)(TDB_DATA, TDB_DATA, void *), void *)
50tdb_runtime_check_for_robust_mutexes: bool (void)
51tdb_set_logging_function: void (struct tdb_context *, const struct tdb_logging_context *)
52tdb_set_max_dead: void (struct tdb_context *, int)
53tdb_setalarm_sigptr: void (struct tdb_context *, volatile sig_atomic_t *)
54tdb_store: int (struct tdb_context *, TDB_DATA, TDB_DATA, int)
55tdb_storev: int (struct tdb_context *, TDB_DATA, const TDB_DATA *, int, int)
56tdb_summary: char *(struct tdb_context *)
57tdb_transaction_active: bool (struct tdb_context *)
58tdb_transaction_cancel: int (struct tdb_context *)
59tdb_transaction_commit: int (struct tdb_context *)
60tdb_transaction_prepare_commit: int (struct tdb_context *)
61tdb_transaction_start: int (struct tdb_context *)
62tdb_transaction_start_nonblock: int (struct tdb_context *)
63tdb_transaction_write_lock_mark: int (struct tdb_context *)
64tdb_transaction_write_lock_unmark: int (struct tdb_context *)
65tdb_traverse: int (struct tdb_context *, tdb_traverse_func, void *)
66tdb_traverse_chain: int (struct tdb_context *, unsigned int, tdb_traverse_func, void *)
67tdb_traverse_key_chain: int (struct tdb_context *, TDB_DATA, tdb_traverse_func, void *)
68tdb_traverse_read: int (struct tdb_context *, tdb_traverse_func, void *)
69tdb_unlock: int (struct tdb_context *, int, int)
70tdb_unlockall: int (struct tdb_context *)
71tdb_unlockall_read: int (struct tdb_context *)
72tdb_validate_freelist: int (struct tdb_context *, int *)
73tdb_wipe_all: int (struct tdb_context *)
diff --git a/Makefile b/Makefile
index fe44ff6..8fd56c8 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,8 @@
 # simple makefile wrapper to run waf
 
-WAF=WAF_MAKE=1 PATH=buildtools/bin:../../buildtools/bin:$$PATH waf
+WAF_BIN=`PATH=buildtools/bin:../../buildtools/bin:$$PATH which waf`
+WAF_BINARY=$(PYTHON) $(WAF_BIN)
+WAF=PYTHONHASHSEED=1 WAF_MAKE=1 $(WAF_BINARY)
 
 all:
 	$(WAF) build
diff --git a/_tdb_text.py b/_tdb_text.py
index c823bf8..f3caa53 100644
--- a/_tdb_text.py
+++ b/_tdb_text.py
@@ -4,7 +4,6 @@
 # Published under the GNU LGPLv3 or later
 
 import sys
-import functools
 
 import tdb
 
diff --git a/buildtools/bin/waf b/buildtools/bin/waf
index 1b0f466..3ee4d5b 100755
--- a/buildtools/bin/waf
+++ b/buildtools/bin/waf
@@ -1,7 +1,7 @@
1#!/usr/bin/env python1#!/usr/bin/env python3
2# encoding: ISO-8859-12# encoding: latin-1
3# Thomas Nagy, 2005-20103# Thomas Nagy, 2005-2018
44#
5"""5"""
6Redistribution and use in source and binary forms, with or without6Redistribution and use in source and binary forms, with or without
7modification, are permitted provided that the following conditions7modification, are permitted provided that the following conditions
@@ -30,25 +30,24 @@ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30POSSIBILITY OF SUCH DAMAGE.30POSSIBILITY OF SUCH DAMAGE.
31"""31"""
3232
33import os, sys33import os, sys, inspect
34if sys.hexversion<0x203000f: raise ImportError("Waf requires Python >= 2.3")
35
36if 'PSYCOWAF' in os.environ:
37 try:import psyco;psyco.full()
38 except:pass
3934
40VERSION="1.5.19"35VERSION="2.0.8"
41REVISION="x"36REVISION="x"
37GIT="x"
42INSTALL="x"38INSTALL="x"
43C1='x'39C1='x'
44C2='x'40C2='x'
41C3='x'
45cwd = os.getcwd()42cwd = os.getcwd()
46join = os.path.join43join = os.path.join
4744
45if sys.hexversion<0x206000f:
46 raise ImportError('Python >= 2.6 is required to create the waf file')
47
48WAF='waf'48WAF='waf'
49def b(x):49def b(x):
50 return x50 return x
51
52if sys.hexversion>0x300000f:51if sys.hexversion>0x300000f:
53 WAF='waf3'52 WAF='waf3'
54 def b(x):53 def b(x):
@@ -58,20 +57,111 @@ def err(m):
58 print(('\033[91mError: %s\033[0m' % m))57 print(('\033[91mError: %s\033[0m' % m))
59 sys.exit(1)58 sys.exit(1)
6059
61def test(dir):60def unpack_wafdir(dir, src):
62 try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir)61 f = open(src,'rb')
62 c = 'corrupt archive (%d)'
63 while 1:
64 line = f.readline()
65 if not line: err('run waf-light from a folder containing waflib')
66 if line == b('#==>\n'):
67 txt = f.readline()
68 if not txt: err(c % 1)
69 if f.readline() != b('#<==\n'): err(c % 2)
70 break
71 if not txt: err(c % 3)
72 txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
73
74 import shutil, tarfile
75 try: shutil.rmtree(dir)
63 except OSError: pass76 except OSError: pass
77 try:
78 for x in ('Tools', 'extras'):
79 os.makedirs(join(dir, 'waflib', x))
80 except OSError:
81 err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
82
83 os.chdir(dir)
84 tmp = 't.bz2'
85 t = open(tmp,'wb')
86 try: t.write(txt)
87 finally: t.close()
88
89 try:
90 t = tarfile.open(tmp)
91 except:
92 try:
93 os.system('bunzip2 t.bz2')
94 t = tarfile.open('t')
95 tmp = 't'
96 except:
97 os.chdir(cwd)
98 try: shutil.rmtree(dir)
99 except OSError: pass
100 err("Waf cannot be unpacked, check that bzip2 support is present")
101
102 try:
103 for x in t: t.extract(x)
104 finally:
105 t.close()
106
107 for x in ('Tools', 'extras'):
108 os.chmod(join('waflib',x), 493)
109
110 if sys.hexversion<0x300000f:
111 sys.path = [join(dir, 'waflib')] + sys.path
112 import fixpy2
113 fixpy2.fixdir(dir)
114
115 os.remove(tmp)
116 os.chdir(cwd)
117
118 try: dir = unicode(dir, 'mbcs')
119 except: pass
120 try:
121 from ctypes import windll
122 windll.kernel32.SetFileAttributesW(dir, 2)
123 except:
124 pass
125
126def test(dir):
127 try:
128 os.stat(join(dir, 'waflib'))
129 return os.path.abspath(dir)
130 except OSError:
131 pass
64132
65def find_lib():133def find_lib():
66 return os.path.abspath(os.path.join(os.path.dirname(__file__), '../../third_party/waf'))134 path = '../../third_party/waf'
135 paths = [path, path+'/waflib']
136 return [os.path.abspath(os.path.join(os.path.dirname(__file__), x)) for x in paths]
67137
68wafdir = find_lib()138wafdir = find_lib()
69w = join(wafdir, 'wafadmin')139for p in wafdir:
70t = join(w, 'Tools')140 sys.path.insert(0, p)
71f = join(w, '3rdparty')
72sys.path = [w, t, f] + sys.path
73141
74if __name__ == '__main__':142if __name__ == '__main__':
75 import Scripting143 #import extras.compat15#PRELUDE
76 Scripting.prepare(t, cwd, VERSION, wafdir)144 import sys
145
146 from waflib.Tools import ccroot, c, ar, compiler_c, gcc
147 sys.modules['cc'] = c
148 sys.modules['ccroot'] = ccroot
149 sys.modules['ar'] = ar
150 sys.modules['compiler_cc'] = compiler_c
151 sys.modules['gcc'] = gcc
152
153 from waflib import Options
154 Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
155 if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0:
156 os.environ['NOCLIMB'] = "1"
157 # there is a single top-level, but libraries must build independently
158 os.environ['NO_LOCK_IN_TOP'] = "1"
159
160 from waflib import Task
161 class o(object):
162 display = None
163 Task.classes['cc_link'] = o
164
165 from waflib import Scripting
166 Scripting.waf_entry_point(cwd, VERSION, wafdir[0])
77167
diff --git a/buildtools/examples/run_on_target.py b/buildtools/examples/run_on_target.py
index 8322759..79c5730 100755
--- a/buildtools/examples/run_on_target.py
+++ b/buildtools/examples/run_on_target.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 #
 # Sample run-on-target script
diff --git a/buildtools/scripts/abi_gen.sh b/buildtools/scripts/abi_gen.sh
index 787718c..6dd6d32 100755
--- a/buildtools/scripts/abi_gen.sh
+++ b/buildtools/scripts/abi_gen.sh
@@ -17,5 +17,5 @@ done
 ) > $GDBSCRIPT
 
 # forcing the terminal avoids a problem on Fedora12
-TERM=none gdb -batch -x $GDBSCRIPT "$SHAREDLIB" < /dev/null
+TERM=none gdb -n -batch -x $GDBSCRIPT "$SHAREDLIB" < /dev/null
 rm -f $GDBSCRIPT
diff --git a/buildtools/wafsamba/configure_file.py b/buildtools/wafsamba/configure_file.py
index e28282b..6ad4354 100644
--- a/buildtools/wafsamba/configure_file.py
+++ b/buildtools/wafsamba/configure_file.py
@@ -1,7 +1,9 @@
 # handle substitution of variables in .in files
 
-import re, os
-import Build, sys, Logs
+import sys
+import re
+import os
+from waflib import Build, Logs
 from samba_utils import SUBST_VARS_RECURSIVE
 
 def subst_at_vars(task):
diff --git a/buildtools/wafsamba/generic_cc.py b/buildtools/wafsamba/generic_cc.py
index 504e902..1352c54 100644
--- a/buildtools/wafsamba/generic_cc.py
+++ b/buildtools/wafsamba/generic_cc.py
@@ -3,69 +3,68 @@
3# based on suncc.py from waf3# based on suncc.py from waf
44
5import os, optparse5import os, optparse
6import Utils, Options, Configure6from waflib import Errors
7import ccroot, ar7from waflib.Tools import ccroot, ar
8from Configure import conftest8from waflib.Configure import conf
99
10from compiler_cc import c_compiler10#
11# Let waflib provide useful defaults, but
12# provide generic_cc as last resort fallback on
13# all platforms
14#
15from waflib.Tools.compiler_c import c_compiler
16for key in c_compiler.keys():
17 c_compiler[key].append('generic_cc')
1118
12c_compiler['default'] = ['gcc', 'generic_cc']19@conf
13c_compiler['hpux'] = ['gcc', 'generic_cc']
14
15@conftest
16def find_generic_cc(conf):20def find_generic_cc(conf):
17 v = conf.env21 v = conf.env
18 cc = None22 cc = None
19 if v['CC']: cc = v['CC']23 if v.CC:
20 elif 'CC' in conf.environ: cc = conf.environ['CC']24 cc = v.CC
21 if not cc: cc = conf.find_program('cc', var='CC')25 elif 'CC' in conf.environ:
22 if not cc: conf.fatal('generic_cc was not found')26 cc = conf.environ['CC']
23 cc = conf.cmd_to_list(cc)27 if not cc:
24 v['CC'] = cc28 cc = conf.find_program('cc', var='CC')
25 v['CC_NAME'] = 'generic'29 if not cc:
2630 conf.fatal('generic_cc was not found')
27@conftest
28def generic_cc_common_flags(conf):
29 v = conf.env
3031
31 v['CC_SRC_F'] = ''32 try:
32 v['CC_TGT_F'] = ['-c', '-o', '']33 conf.cmd_and_log(cc + ['--version'])
33 v['CPPPATH_ST'] = '-I%s' # template for adding include paths34 except Errors.WafError:
35 conf.fatal('%r --version could not be executed' % cc)
3436
35 # linker37 v.CC = cc
36 if not v['LINK_CC']: v['LINK_CC'] = v['CC']38 v.CC_NAME = 'generic_cc'
37 v['CCLNK_SRC_F'] = ''
38 v['CCLNK_TGT_F'] = ['-o', '']
3939
40 v['LIB_ST'] = '-l%s' # template for adding libs40@conf
41 v['LIBPATH_ST'] = '-L%s' # template for adding libpaths41def generic_cc_common_flags(conf):
42 v['STATICLIB_ST'] = '-l%s'42 v = conf.env
43 v['STATICLIBPATH_ST'] = '-L%s'
44 v['CCDEFINES_ST'] = '-D%s'
4543
46# v['SONAME_ST'] = '-Wl,-h -Wl,%s'44 v.CC_SRC_F = ''
47# v['SHLIB_MARKER'] = '-Bdynamic'45 v.CC_TGT_F = ['-c', '-o']
48# v['STATICLIB_MARKER'] = '-Bstatic'46 v.CPPPATH_ST = '-I%s'
47 v.DEFINES_ST = '-D%s'
4948
50 # program49 if not v.LINK_CC:
51 v['program_PATTERN'] = '%s'50 v.LINK_CC = v.CC
5251
53 # shared library52 v.CCLNK_SRC_F = ''
54# v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']53 v.CCLNK_TGT_F = ['-o']
55# v['shlib_LINKFLAGS'] = ['-G']
56 v['shlib_PATTERN'] = 'lib%s.so'
5754
58 # static lib55 v.LIB_ST = '-l%s' # template for adding libs
59# v['staticlib_LINKFLAGS'] = ['-Bstatic']56 v.LIBPATH_ST = '-L%s' # template for adding libpaths
60# v['staticlib_PATTERN'] = 'lib%s.a'57 v.STLIB_ST = '-l%s'
58 v.STLIBPATH_ST = '-L%s'
6159
62detect = '''60 v.cprogram_PATTERN = '%s'
63find_generic_cc61 v.cshlib_PATTERN = 'lib%s.so'
64find_cpp62 v.cstlib_PATTERN = 'lib%s.a'
65find_ar
66generic_cc_common_flags
67cc_load_tools
68cc_add_flags
69link_add_flags
70'''
7163
64def configure(conf):
65 conf.find_generic_cc()
66 conf.find_ar()
67 conf.generic_cc_common_flags()
68 conf.cc_load_tools()
69 conf.cc_add_flags()
70 conf.link_add_flags()
diff --git a/buildtools/wafsamba/hpuxcc.py b/buildtools/wafsamba/hpuxcc.py
deleted file mode 100644
index c263556..0000000
--- a/buildtools/wafsamba/hpuxcc.py
+++ /dev/null
@@ -1,56 +0,0 @@
1# compiler definition for HPUX
2# based on suncc.py from waf
3
4import os, optparse, sys
5import Utils, Options, Configure
6import ccroot, ar
7from Configure import conftest
8import gcc
9
10
11@conftest
12def gcc_modifier_hpux(conf):
13 v=conf.env
14 v['CCFLAGS_DEBUG']=['-g']
15 v['CCFLAGS_RELEASE']=['-O2']
16 v['CC_SRC_F']=''
17 v['CC_TGT_F']=['-c','-o','']
18 v['CPPPATH_ST']='-I%s'
19 if not v['LINK_CC']:v['LINK_CC']=v['CC']
20 v['CCLNK_SRC_F']=''
21 v['CCLNK_TGT_F']=['-o','']
22 v['LIB_ST']='-l%s'
23 v['LIBPATH_ST']='-L%s'
24 v['STATICLIB_ST']='-l%s'
25 v['STATICLIBPATH_ST']='-L%s'
26 v['RPATH_ST']='-Wl,-rpath,%s'
27 v['CCDEFINES_ST']='-D%s'
28 v['SONAME_ST']='-Wl,-h,%s'
29 v['SHLIB_MARKER']=[]
30# v['STATICLIB_MARKER']='-Wl,-Bstatic'
31 v['FULLSTATIC_MARKER']='-static'
32 v['program_PATTERN']='%s'
33 v['shlib_CCFLAGS']=['-fPIC','-DPIC']
34 v['shlib_LINKFLAGS']=['-shared']
35 v['shlib_PATTERN']='lib%s.sl'
36# v['staticlib_LINKFLAGS']=['-Wl,-Bstatic']
37 v['staticlib_PATTERN']='lib%s.a'
38
39gcc.gcc_modifier_hpux = gcc_modifier_hpux
40
41from TaskGen import feature, after
42@feature('cprogram', 'cshlib')
43@after('apply_link', 'apply_lib_vars', 'apply_obj_vars')
44def hpux_addfullpath(self):
45 if sys.platform == 'hp-ux11':
46 link = getattr(self, 'link_task', None)
47 if link:
48 lst = link.env.LINKFLAGS
49 buf = []
50 for x in lst:
51 if x.startswith('-L'):
52 p2 = x[2:]
53 if not os.path.isabs(p2):
54 x = x[:2] + self.bld.srcnode.abspath(link.env) + "/../" + x[2:].lstrip('.')
55 buf.append(x)
56 link.env.LINKFLAGS = buf
diff --git a/buildtools/wafsamba/irixcc.py b/buildtools/wafsamba/irixcc.py
deleted file mode 100644
index f3cb451..0000000
--- a/buildtools/wafsamba/irixcc.py
+++ /dev/null
@@ -1,79 +0,0 @@
1
2# compiler definition for irix/MIPSpro cc compiler
3# based on suncc.py from waf
4
5import os, optparse
6import Utils, Options, Configure
7import ccroot, ar
8from Configure import conftest
9
10from compiler_cc import c_compiler
11
12c_compiler['irix'] = ['gcc', 'irixcc']
13
14@conftest
15def find_irixcc(conf):
16 v = conf.env
17 cc = None
18 if v['CC']: cc = v['CC']
19 elif 'CC' in conf.environ: cc = conf.environ['CC']
20 if not cc: cc = conf.find_program('cc', var='CC')
21 if not cc: conf.fatal('irixcc was not found')
22 cc = conf.cmd_to_list(cc)
23
24 try:
25 if Utils.cmd_output(cc + ['-c99'] + ['-version']) != '':
26 conf.fatal('irixcc %r was not found' % cc)
27 except ValueError:
28 conf.fatal('irixcc -v could not be executed')
29
30 conf.env.append_unique('CCFLAGS', '-c99')
31
32 v['CC'] = cc
33 v['CC_NAME'] = 'irix'
34
35@conftest
36def irixcc_common_flags(conf):
37 v = conf.env
38
39 v['CC_SRC_F'] = ''
40 v['CC_TGT_F'] = ['-c', '-o', '']
41 v['CPPPATH_ST'] = '-I%s' # template for adding include paths
42
43 # linker
44 if not v['LINK_CC']: v['LINK_CC'] = v['CC']
45 v['CCLNK_SRC_F'] = ''
46 v['CCLNK_TGT_F'] = ['-o', '']
47
48 v['LIB_ST'] = '-l%s' # template for adding libs
49 v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
50 v['STATICLIB_ST'] = '-l%s'
51 v['STATICLIBPATH_ST'] = '-L%s'
52 v['CCDEFINES_ST'] = '-D%s'
53
54# v['SONAME_ST'] = '-Wl,-h -Wl,%s'
55# v['SHLIB_MARKER'] = '-Bdynamic'
56# v['STATICLIB_MARKER'] = '-Bstatic'
57
58 # program
59 v['program_PATTERN'] = '%s'
60
61 # shared library
62# v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
63# v['shlib_LINKFLAGS'] = ['-G']
64 v['shlib_PATTERN'] = 'lib%s.so'
65
66 # static lib
67# v['staticlib_LINKFLAGS'] = ['-Bstatic']
68# v['staticlib_PATTERN'] = 'lib%s.a'
69
70detect = '''
71find_irixcc
72find_cpp
73find_ar
74irixcc_common_flags
75cc_load_tools
76cc_add_flags
77link_add_flags
78'''
79
diff --git a/buildtools/wafsamba/nothreads.py b/buildtools/wafsamba/nothreads.py
deleted file mode 100644
index 9bd33e8..0000000
--- a/buildtools/wafsamba/nothreads.py
+++ /dev/null
@@ -1,219 +0,0 @@
1# encoding: utf-8
2# Thomas Nagy, 2005-2008 (ita)
3
4# this replaces the core of Runner.py in waf with a varient that works
5# on systems with completely broken threading (such as Python 2.5.x on
6# AIX). For simplicity we enable this when JOBS=1, which is triggered
7# by the compatibility makefile used for the waf build. That also ensures
8# this code is tested, as it means it is used in the build farm, and by
9# anyone using 'make' to build Samba with waf
10
11"Execute the tasks"
12
13import sys, random, threading
14try: from Queue import Queue
15except ImportError: from queue import Queue
16import Utils, Options
17from Constants import EXCEPTION, CRASHED, MAXJOBS, ASK_LATER, SKIPPED, SKIP_ME, SUCCESS
18
19GAP = 15
20
21run_old = threading.Thread.run
22def run(*args, **kwargs):
23 try:
24 run_old(*args, **kwargs)
25 except (KeyboardInterrupt, SystemExit):
26 raise
27 except:
28 sys.excepthook(*sys.exc_info())
29threading.Thread.run = run
30
31
32class TaskConsumer(object):
33 consumers = 1
34
35def process(tsk):
36 m = tsk.master
37 if m.stop:
38 m.out.put(tsk)
39 return
40
41 try:
42 tsk.generator.bld.printout(tsk.display())
43 if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
44 # actual call to task's run() function
45 else: ret = tsk.call_run()
46 except Exception as e:
47 tsk.err_msg = Utils.ex_stack()
48 tsk.hasrun = EXCEPTION
49
50 # TODO cleanup
51 m.error_handler(tsk)
52 m.out.put(tsk)
53 return
54
55 if ret:
56 tsk.err_code = ret
57 tsk.hasrun = CRASHED
58 else:
59 try:
60 tsk.post_run()
61 except Utils.WafError:
62 pass
63 except Exception:
64 tsk.err_msg = Utils.ex_stack()
65 tsk.hasrun = EXCEPTION
66 else:
67 tsk.hasrun = SUCCESS
68 if tsk.hasrun != SUCCESS:
69 m.error_handler(tsk)
70
71 m.out.put(tsk)
72
73class Parallel(object):
74 """
75 keep the consumer threads busy, and avoid consuming cpu cycles
76 when no more tasks can be added (end of the build, etc)
77 """
78 def __init__(self, bld, j=2):
79
80 # number of consumers
81 self.numjobs = j
82
83 self.manager = bld.task_manager
84 self.manager.current_group = 0
85
86 self.total = self.manager.total()
87
88 # tasks waiting to be processed - IMPORTANT
89 self.outstanding = []
90 self.maxjobs = MAXJOBS
91
92 # tasks that are awaiting for another task to complete
93 self.frozen = []
94
95 # tasks returned by the consumers
96 self.out = Queue(0)
97
98 self.count = 0 # tasks not in the producer area
99
100 self.processed = 1 # progress indicator
101
102 self.stop = False # error condition to stop the build
103 self.error = False # error flag
104
105 def get_next(self):
106 "override this method to schedule the tasks in a particular order"
107 if not self.outstanding:
108 return None
109 return self.outstanding.pop(0)
110
111 def postpone(self, tsk):
112 "override this method to schedule the tasks in a particular order"
113 # TODO consider using a deque instead
114 if random.randint(0, 1):
115 self.frozen.insert(0, tsk)
116 else:
117 self.frozen.append(tsk)
118
119 def refill_task_list(self):
120 "called to set the next group of tasks"
121
122 while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
123 self.get_out()
124
125 while not self.outstanding:
126 if self.count:
127 self.get_out()
128
129 if self.frozen:
130 self.outstanding += self.frozen
131 self.frozen = []
132 elif not self.count:
133 (jobs, tmp) = self.manager.get_next_set()
134 if jobs is not None:
135 self.maxjobs = jobs
136 if tmp:
137 self.outstanding += tmp
138 break
139
140 def get_out(self):
141 "the tasks that are put to execute are all collected using get_out"
142 ret = self.out.get()
143 self.manager.add_finished(ret)
144 if not self.stop and getattr(ret, 'more_tasks', None):
145 self.outstanding += ret.more_tasks
146 self.total += len(ret.more_tasks)
147 self.count -= 1
148
149 def error_handler(self, tsk):
150 "by default, errors make the build stop (not thread safe so be careful)"
151 if not Options.options.keep:
152 self.stop = True
153 self.error = True
154
155 def start(self):
156 "execute the tasks"
157
158 while not self.stop:
159
160 self.refill_task_list()
161
162 # consider the next task
163 tsk = self.get_next()
164 if not tsk:
165 if self.count:
166 # tasks may add new ones after they are run
167 continue
168 else:
169 # no tasks to run, no tasks running, time to exit
170 break
171
172 if tsk.hasrun:
173 # if the task is marked as "run", just skip it
174 self.processed += 1
175 self.manager.add_finished(tsk)
176 continue
177
178 try:
179 st = tsk.runnable_status()
180 except Exception as e:
181 self.processed += 1
182 if self.stop and not Options.options.keep:
183 tsk.hasrun = SKIPPED
184 self.manager.add_finished(tsk)
185 continue
186 self.error_handler(tsk)
187 self.manager.add_finished(tsk)
188 tsk.hasrun = EXCEPTION
189 tsk.err_msg = Utils.ex_stack()
190 continue
191
192 if st == ASK_LATER:
193 self.postpone(tsk)
194 elif st == SKIP_ME:
195 self.processed += 1
196 tsk.hasrun = SKIPPED
197 self.manager.add_finished(tsk)
198 else:
199 # run me: put the task in ready queue
200 tsk.position = (self.processed, self.total)
201 self.count += 1
202 self.processed += 1
203 tsk.master = self
204
205 process(tsk)
206
207 # self.count represents the tasks that have been made available to the consumer threads
208 # collect all the tasks after an error else the message may be incomplete
209 while self.error and self.count:
210 self.get_out()
211
212 #print loop
213 assert (self.count == 0 or self.stop)
214
215
216# enable nothreads
217import Runner
218Runner.process = process
219Runner.Parallel = Parallel
diff --git a/buildtools/wafsamba/pkgconfig.py b/buildtools/wafsamba/pkgconfig.py
index 999bad4..b83d5f3 100644
--- a/buildtools/wafsamba/pkgconfig.py
+++ b/buildtools/wafsamba/pkgconfig.py
@@ -1,7 +1,7 @@
1# handle substitution of variables in pc files1# handle substitution of variables in pc files
22
3import os, re, sys3import os, re, sys
4import Build, Logs4from waflib import Build, Logs
5from samba_utils import SUBST_VARS_RECURSIVE, TO_LIST5from samba_utils import SUBST_VARS_RECURSIVE, TO_LIST
66
7def subst_at_vars(task):7def subst_at_vars(task):
@@ -52,7 +52,7 @@ def PKG_CONFIG_FILES(bld, pc_files, vnum=None, extra_name=None):
52 rule=subst_at_vars,52 rule=subst_at_vars,
53 source=f+'.in',53 source=f+'.in',
54 target=target)54 target=target)
55 bld.add_manual_dependency(bld.path.find_or_declare(f), bld.env['PREFIX'])55 bld.add_manual_dependency(bld.path.find_or_declare(f), bld.env['PREFIX'].encode('utf8'))
56 t.vars = []56 t.vars = []
57 if t.env.RPATH_ON_INSTALL:57 if t.env.RPATH_ON_INSTALL:
58 t.env.LIB_RPATH = t.env.RPATH_ST % t.env.LIBDIR58 t.env.LIB_RPATH = t.env.RPATH_ST % t.env.LIBDIR
diff --git a/buildtools/wafsamba/samba3.py b/buildtools/wafsamba/samba3.py
index 44daff9..5aab250 100644
--- a/buildtools/wafsamba/samba3.py
+++ b/buildtools/wafsamba/samba3.py
@@ -1,12 +1,11 @@
1# a waf tool to add autoconf-like macros to the configure section1# a waf tool to add autoconf-like macros to the configure section
2# and for SAMBA_ macros for building libraries, binaries etc2# and for SAMBA_ macros for building libraries, binaries etc
33
4import Options, Build, os4import os
5from samba_utils import os_path_relpath, TO_LIST, samba_add_onoff_option5from waflib import Build
6from samba_utils import os_path_relpath, TO_LIST
6from samba_autoconf import library_flags7from samba_autoconf import library_flags
78
8Options.Handler.SAMBA3_ADD_OPTION = samba_add_onoff_option
9
10def SAMBA3_IS_STATIC_MODULE(bld, module):9def SAMBA3_IS_STATIC_MODULE(bld, module):
11 '''Check whether module is in static list'''10 '''Check whether module is in static list'''
12 if module in bld.env['static_modules']:11 if module in bld.env['static_modules']:
@@ -32,7 +31,7 @@ def s3_fix_kwargs(bld, kwargs):
32 '''fix the build arguments for s3 build rules to include the31 '''fix the build arguments for s3 build rules to include the
33 necessary includes, subdir and cflags options '''32 necessary includes, subdir and cflags options '''
34 s3dir = os.path.join(bld.env.srcdir, 'source3')33 s3dir = os.path.join(bld.env.srcdir, 'source3')
35 s3reldir = os_path_relpath(s3dir, bld.curdir)34 s3reldir = os_path_relpath(s3dir, bld.path.abspath())
3635
37 # the extra_includes list is relative to the source3 directory36 # the extra_includes list is relative to the source3 directory
38 extra_includes = [ '.', 'include', 'lib' ]37 extra_includes = [ '.', 'include', 'lib' ]
diff --git a/buildtools/wafsamba/samba_abi.py b/buildtools/wafsamba/samba_abi.py
index 196b468..5e7686d 100644
--- a/buildtools/wafsamba/samba_abi.py
+++ b/buildtools/wafsamba/samba_abi.py
@@ -1,7 +1,13 @@
1# functions for handling ABI checking of libraries1# functions for handling ABI checking of libraries
22
3import Options, Utils, os, Logs, samba_utils, sys, Task, fnmatch, re, Build3import os
4from TaskGen import feature, before, after4import sys
5import re
6import fnmatch
7
8from waflib import Options, Utils, Logs, Task, Build, Errors
9from waflib.TaskGen import feature, before, after
10from wafsamba import samba_utils
511
6# these type maps cope with platform specific names for common types12# these type maps cope with platform specific names for common types
7# please add new type mappings into the list below13# please add new type mappings into the list below
@@ -10,7 +16,7 @@ abi_type_maps = {
10 'struct __va_list_tag *' : 'va_list'16 'struct __va_list_tag *' : 'va_list'
11 }17 }
1218
13version_key = lambda x: map(int, x.split("."))19version_key = lambda x: list(map(int, x.split(".")))
1420
15def normalise_signature(sig):21def normalise_signature(sig):
16 '''normalise a signature from gdb'''22 '''normalise a signature from gdb'''
@@ -79,7 +85,7 @@ def abi_check_task(self):
79 libpath = self.inputs[0].abspath(self.env)85 libpath = self.inputs[0].abspath(self.env)
80 libname = os.path.basename(libpath)86 libname = os.path.basename(libpath)
8187
82 sigs = Utils.cmd_output([abi_gen, libpath])88 sigs = samba_utils.get_string(Utils.cmd_output([abi_gen, libpath]))
83 parsed_sigs = parse_sigs(sigs, self.ABI_MATCH)89 parsed_sigs = parse_sigs(sigs, self.ABI_MATCH)
8490
85 sig_file = self.ABI_FILE91 sig_file = self.ABI_FILE
@@ -87,7 +93,7 @@ def abi_check_task(self):
87 old_sigs = samba_utils.load_file(sig_file)93 old_sigs = samba_utils.load_file(sig_file)
88 if old_sigs is None or Options.options.ABI_UPDATE:94 if old_sigs is None or Options.options.ABI_UPDATE:
89 if not save_sigs(sig_file, parsed_sigs):95 if not save_sigs(sig_file, parsed_sigs):
90 raise Utils.WafError('Failed to save ABI file "%s"' % sig_file)96 raise Errors.WafError('Failed to save ABI file "%s"' % sig_file)
91 Logs.warn('Generated ABI signatures %s' % sig_file)97 Logs.warn('Generated ABI signatures %s' % sig_file)
92 return98 return
9399
@@ -112,14 +118,14 @@ def abi_check_task(self):
112 got_error = True118 got_error = True
113119
114 if got_error:120 if got_error:
115 raise Utils.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname)121 raise Errors.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname)
116122
117123
118t = Task.task_type_from_func('abi_check', abi_check_task, color='BLUE', ext_in='.bin')124t = Task.task_factory('abi_check', abi_check_task, color='BLUE', ext_in='.bin')
119t.quiet = True125t.quiet = True
120# allow "waf --abi-check" to force re-checking the ABI126# allow "waf --abi-check" to force re-checking the ABI
121if '--abi-check' in sys.argv:127if '--abi-check' in sys.argv:
122 Task.always_run(t)128 t.always_run = True
123129
124@after('apply_link')130@after('apply_link')
125@feature('abi_check')131@feature('abi_check')
@@ -184,18 +190,20 @@ def abi_write_vscript(f, libname, current_version, versions, symmap, abi_match):
184 f.write("}%s;\n\n" % last_key)190 f.write("}%s;\n\n" % last_key)
185 last_key = " %s" % symver191 last_key = " %s" % symver
186 f.write("%s {\n" % current_version)192 f.write("%s {\n" % current_version)
187 local_abi = filter(lambda x: x[0] == '!', abi_match)193 local_abi = list(filter(lambda x: x[0] == '!', abi_match))
188 global_abi = filter(lambda x: x[0] != '!', abi_match)194 global_abi = list(filter(lambda x: x[0] != '!', abi_match))
189 f.write("\tglobal:\n")195 f.write("\tglobal:\n")
190 if len(global_abi) > 0:196 if len(global_abi) > 0:
191 for x in global_abi:197 for x in global_abi:
192 f.write("\t\t%s;\n" % x)198 f.write("\t\t%s;\n" % x)
193 else:199 else:
194 f.write("\t\t*;\n")200 f.write("\t\t*;\n")
195 if abi_match != ["*"]:201 # Always hide symbols that must be local if exist
196 f.write("\tlocal:\n")202 local_abi.extend(["!_end", "!__bss_start", "!_edata"])
197 for x in local_abi:203 f.write("\tlocal:\n")
198 f.write("\t\t%s;\n" % x[1:])204 for x in local_abi:
205 f.write("\t\t%s;\n" % x[1:])
206 if global_abi != ["*"]:
199 if len(global_abi) > 0:207 if len(global_abi) > 0:
200 f.write("\t\t*;\n")208 f.write("\t\t*;\n")
201 f.write("};\n")209 f.write("};\n")
diff --git a/buildtools/wafsamba/samba_autoconf.py b/buildtools/wafsamba/samba_autoconf.py
index c4391d0..ee1fc23 100644
--- a/buildtools/wafsamba/samba_autoconf.py
+++ b/buildtools/wafsamba/samba_autoconf.py
@@ -1,9 +1,10 @@
1# a waf tool to add autoconf-like macros to the configure section1# a waf tool to add autoconf-like macros to the configure section
22
3import os, sys3import os, sys
4import Build, Options, preproc, Logs4from waflib import Build, Options, Logs, Context
5from Configure import conf5from waflib.Configure import conf
6from TaskGen import feature6from waflib.TaskGen import feature
7from waflib.Tools import c_preproc as preproc
7from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p8from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p
89
9missing_headers = set()10missing_headers = set()
@@ -18,7 +19,7 @@ def DEFINE(conf, d, v, add_to_cflags=False, quote=False):
18 '''define a config option'''19 '''define a config option'''
19 conf.define(d, v, quote=quote)20 conf.define(d, v, quote=quote)
20 if add_to_cflags:21 if add_to_cflags:
21 conf.env.append_value('CCDEFINES', d + '=' + str(v))22 conf.env.append_value('CFLAGS', '-D%s=%s' % (d, str(v)))
2223
23def hlist_to_string(conf, headers=None):24def hlist_to_string(conf, headers=None):
24 '''convert a headers list to a set of #include lines'''25 '''convert a headers list to a set of #include lines'''
@@ -44,11 +45,11 @@ def COMPOUND_START(conf, msg):
44 if v != [] and v != 0:45 if v != [] and v != 0:
45 conf.env.in_compound = v + 146 conf.env.in_compound = v + 1
46 return47 return
47 conf.check_message_1(msg)48 conf.start_msg(msg)
48 conf.saved_check_message_1 = conf.check_message_149 conf.saved_check_message_1 = conf.start_msg
49 conf.check_message_1 = null_check_message_150 conf.start_msg = null_check_message_1
50 conf.saved_check_message_2 = conf.check_message_251 conf.saved_check_message_2 = conf.end_msg
51 conf.check_message_2 = null_check_message_252 conf.end_msg = null_check_message_2
52 conf.env.in_compound = 153 conf.env.in_compound = 1
5354
5455
@@ -58,9 +59,9 @@ def COMPOUND_END(conf, result):
58 conf.env.in_compound -= 159 conf.env.in_compound -= 1
59 if conf.env.in_compound != 0:60 if conf.env.in_compound != 0:
60 return61 return
61 conf.check_message_1 = conf.saved_check_message_162 conf.start_msg = conf.saved_check_message_1
62 conf.check_message_2 = conf.saved_check_message_263 conf.end_msg = conf.saved_check_message_2
63 p = conf.check_message_264 p = conf.end_msg
64 if result is True:65 if result is True:
65 p('ok')66 p('ok')
66 elif not result:67 elif not result:
@@ -96,10 +97,10 @@ def CHECK_HEADER(conf, h, add_headers=False, lib=None):
96 hdrs = hlist_to_string(conf, headers=h)97 hdrs = hlist_to_string(conf, headers=h)
97 if lib is None:98 if lib is None:
98 lib = ""99 lib = ""
99 ret = conf.check(fragment='%s\nint main(void) { return 0; }' % hdrs,100 ret = conf.check(fragment='%s\nint main(void) { return 0; }\n' % hdrs,
100 type='nolink',101 type='nolink',
101 execute=0,102 execute=0,
102 ccflags=ccflags,103 cflags=ccflags,
103 mandatory=False,104 mandatory=False,
104 includes=cpppath,105 includes=cpppath,
105 uselib=lib.upper(),106 uselib=lib.upper(),
@@ -250,7 +251,10 @@ def CHECK_FUNC(conf, f, link=True, lib=None, headers=None):
250251
251 ret = False252 ret = False
252253
253 conf.COMPOUND_START('Checking for %s' % f)254 in_lib_str = ""
255 if lib:
256 in_lib_str = " in %s" % lib
257 conf.COMPOUND_START('Checking for %s%s' % (f, in_lib_str))
254258
255 if link is None or link:259 if link is None or link:
256 ret = CHECK_CODE(conf,260 ret = CHECK_CODE(conf,
@@ -322,7 +326,7 @@ def CHECK_SIZEOF(conf, vars, headers=None, define=None, critical=True):
322 ret = False326 ret = False
323 if v_define is None:327 if v_define is None:
324 v_define = 'SIZEOF_%s' % v.upper().replace(' ', '_')328 v_define = 'SIZEOF_%s' % v.upper().replace(' ', '_')
325 for size in list((1, 2, 4, 8, 16, 32)):329 for size in list((1, 2, 4, 8, 16, 32, 64)):
326 if CHECK_CODE(conf,330 if CHECK_CODE(conf,
327 'static int test_array[1 - 2 * !(((long int)(sizeof(%s))) <= %d)];' % (v, size),331 'static int test_array[1 - 2 * !(((long int)(sizeof(%s))) <= %d)];' % (v, size),
328 define=v_define,332 define=v_define,
@@ -383,12 +387,10 @@ def CHECK_CODE(conf, code, define,
383 else:387 else:
384 execute = 0388 execute = 0
385389
386 defs = conf.get_config_header()
387
388 if addmain:390 if addmain:
389 fragment='%s\n%s\n int main(void) { %s; return 0; }\n' % (defs, hdrs, code)391 fragment='%s\n int main(void) { %s; return 0; }\n' % (hdrs, code)
390 else:392 else:
391 fragment='%s\n%s\n%s\n' % (defs, hdrs, code)393 fragment='%s\n%s\n' % (hdrs, code)
392394
393 if msg is None:395 if msg is None:
394 msg="Checking for %s" % define396 msg="Checking for %s" % define
@@ -398,15 +400,11 @@ def CHECK_CODE(conf, code, define,
398 # Be strict when relying on a compiler check400 # Be strict when relying on a compiler check
399 # Some compilers (e.g. xlc) ignore non-supported features as warnings401 # Some compilers (e.g. xlc) ignore non-supported features as warnings
400 if strict:402 if strict:
401 extra_cflags = None403 if 'WERROR_CFLAGS' in conf.env:
402 if conf.env["CC_NAME"] == "gcc":404 cflags.extend(conf.env['WERROR_CFLAGS'])
403 extra_cflags = "-Werror"
404 elif conf.env["CC_NAME"] == "xlc":
405 extra_cflags = "-qhalt=w"
406 cflags.append(extra_cflags)
407405
408 if local_include:406 if local_include:
409 cflags.append('-I%s' % conf.curdir)407 cflags.append('-I%s' % conf.path.abspath())
410408
411 if not link:409 if not link:
412 type='nolink'410 type='nolink'
@@ -431,11 +429,11 @@ def CHECK_CODE(conf, code, define,
431429
432 conf.COMPOUND_START(msg)430 conf.COMPOUND_START(msg)
433431
434 ret = conf.check(fragment=fragment,432 try:
433 ret = conf.check(fragment=fragment,
435 execute=execute,434 execute=execute,
436 define_name = define,435 define_name = define,
437 mandatory = mandatory,436 cflags=cflags,
438 ccflags=cflags,
439 ldflags=ldflags,437 ldflags=ldflags,
440 includes=includes,438 includes=includes,
441 uselib=uselib,439 uselib=uselib,
@@ -444,22 +442,30 @@ def CHECK_CODE(conf, code, define,
444 quote=quote,442 quote=quote,
445 exec_args=exec_args,443 exec_args=exec_args,
446 define_ret=define_ret)444 define_ret=define_ret)
447 if not ret and CONFIG_SET(conf, define):445 except Exception:
448 # sometimes conf.check() returns false, but it446 if always:
449 # sets the define. Maybe a waf bug?447 conf.DEFINE(define, 0)
450 ret = True448 else:
451 if ret:449 conf.undefine(define)
450 conf.COMPOUND_END(False)
451 if mandatory:
452 raise
453 return False
454 else:
455 # Success is indicated by ret but we should unset
456 # defines set by WAF's c_config.check() because it
457 # defines it to int(ret) and we want to undefine it
458 if not ret:
459 conf.undefine(define)
460 conf.COMPOUND_END(False)
461 return False
452 if not define_ret:462 if not define_ret:
453 conf.DEFINE(define, 1)463 conf.DEFINE(define, 1)
454 conf.COMPOUND_END(True)464 conf.COMPOUND_END(True)
455 else:465 else:
456 conf.COMPOUND_END(conf.env[define])466 conf.DEFINE(define, ret, quote=quote)
467 conf.COMPOUND_END(ret)
457 return True468 return True
458 if always:
459 conf.DEFINE(define, 0)
460 conf.COMPOUND_END(False)
461 return False
462
463469
464470
465@conf471@conf
@@ -490,8 +496,9 @@ def CHECK_CFLAGS(conf, cflags, fragment='int main(void) { return 0; }\n'):
490 check_cflags.extend(conf.env['WERROR_CFLAGS'])496 check_cflags.extend(conf.env['WERROR_CFLAGS'])
491 return conf.check(fragment=fragment,497 return conf.check(fragment=fragment,
492 execute=0,498 execute=0,
499 mandatory=False,
493 type='nolink',500 type='nolink',
494 ccflags=check_cflags,501 cflags=check_cflags,
495 msg="Checking compiler accepts %s" % cflags)502 msg="Checking compiler accepts %s" % cflags)
496503
497@conf504@conf
@@ -547,12 +554,15 @@ def library_flags(self, libs):
547 # note that we do not add the -I and -L in here, as that is added by the waf554 # note that we do not add the -I and -L in here, as that is added by the waf
548 # core. Adding it here would just change the order that it is put on the link line555 # core. Adding it here would just change the order that it is put on the link line
549 # which can cause system paths to be added before internal libraries556 # which can cause system paths to be added before internal libraries
550 extra_ccflags = TO_LIST(getattr(self.env, 'CCFLAGS_%s' % lib.upper(), []))557 extra_ccflags = TO_LIST(getattr(self.env, 'CFLAGS_%s' % lib.upper(), []))
551 extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), []))558 extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), []))
552 extra_cpppath = TO_LIST(getattr(self.env, 'CPPPATH_%s' % lib.upper(), []))559 extra_cpppath = TO_LIST(getattr(self.env, 'CPPPATH_%s' % lib.upper(), []))
553 ccflags.extend(extra_ccflags)560 ccflags.extend(extra_ccflags)
554 ldflags.extend(extra_ldflags)561 ldflags.extend(extra_ldflags)
555 cpppath.extend(extra_cpppath)562 cpppath.extend(extra_cpppath)
563
564 extra_cpppath = TO_LIST(getattr(self.env, 'INCLUDES_%s' % lib.upper(), []))
565 cpppath.extend(extra_cpppath)
556 if 'EXTRA_LDFLAGS' in self.env:566 if 'EXTRA_LDFLAGS' in self.env:
557 ldflags.extend(self.env['EXTRA_LDFLAGS'])567 ldflags.extend(self.env['EXTRA_LDFLAGS'])
558568
@@ -585,9 +595,9 @@ int foo()
585595
586 (ccflags, ldflags, cpppath) = library_flags(conf, lib)596 (ccflags, ldflags, cpppath) = library_flags(conf, lib)
587 if shlib:597 if shlib:
588 res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)598 res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
589 else:599 else:
590 res = conf.check(lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)600 res = conf.check(lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
591601
592 if not res:602 if not res:
593 if mandatory:603 if mandatory:
@@ -661,8 +671,8 @@ def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False,
661@conf671@conf
662def IN_LAUNCH_DIR(conf):672def IN_LAUNCH_DIR(conf):
663 '''return True if this rule is being run from the launch directory'''673 '''return True if this rule is being run from the launch directory'''
664 return os.path.realpath(conf.curdir) == os.path.realpath(Options.launch_dir)674 return os.path.realpath(conf.path.abspath()) == os.path.realpath(Context.launch_dir)
665Options.Handler.IN_LAUNCH_DIR = IN_LAUNCH_DIR675Options.OptionsContext.IN_LAUNCH_DIR = IN_LAUNCH_DIR
666676
667677
668@conf678@conf
@@ -674,23 +684,42 @@ def SAMBA_CONFIG_H(conf, path=None):
674 return684 return
675685
676 # we need to build real code that can't be optimized away to test686 # we need to build real code that can't be optimized away to test
677 if conf.check(fragment='''687 stack_protect_list = ['-fstack-protector-strong', '-fstack-protector']
678 #include <stdio.h>688 for stack_protect_flag in stack_protect_list:
679689 flag_supported = conf.check(fragment='''
680 int main(void)690 #include <stdio.h>
681 {691
682 char t[100000];692 int main(void)
683 while (fgets(t, sizeof(t), stdin));693 {
684 return 0;694 char t[100000];
685 }695 while (fgets(t, sizeof(t), stdin));
686 ''',696 return 0;
687 execute=0,697 }
688 ccflags='-fstack-protector',698 ''',
689 ldflags='-fstack-protector',699 execute=0,
690 mandatory=False,700 cflags=[ '-Werror', '-Wp,-D_FORTIFY_SOURCE=2', stack_protect_flag],
691 msg='Checking if toolchain accepts -fstack-protector'):701 mandatory=False,
692 conf.ADD_CFLAGS('-fstack-protector')702 msg='Checking if compiler accepts %s' % (stack_protect_flag))
693 conf.ADD_LDFLAGS('-fstack-protector')703 if flag_supported:
704 conf.ADD_CFLAGS('%s' % (stack_protect_flag))
705 break
706
707 flag_supported = conf.check(fragment='''
708 #include <stdio.h>
709
710 int main(void)
711 {
712 char t[100000];
713 while (fgets(t, sizeof(t), stdin));
714 return 0;
715 }
716 ''',
717 execute=0,
718 cflags=[ '-Werror', '-fstack-clash-protection'],
719 mandatory=False,
720 msg='Checking if compiler accepts -fstack-clash-protection')
721 if flag_supported:
722 conf.ADD_CFLAGS('-fstack-clash-protection')
694723
695 if Options.options.debug:724 if Options.options.debug:
696 conf.ADD_CFLAGS('-g', testflags=True)725 conf.ADD_CFLAGS('-g', testflags=True)
@@ -774,9 +803,12 @@ int main(void) {
774 conf.env['EXTRA_LDFLAGS'].extend(conf.env['ADDITIONAL_LDFLAGS'])803 conf.env['EXTRA_LDFLAGS'].extend(conf.env['ADDITIONAL_LDFLAGS'])
775804
776 if path is None:805 if path is None:
777 conf.write_config_header('config.h', top=True)806 conf.write_config_header('default/config.h', top=True, remove=False)
778 else:807 else:
779 conf.write_config_header(path)808 conf.write_config_header(os.path.join(conf.variant, path), remove=False)
809 for key in conf.env.define_key:
810 conf.undefine(key, from_env=False)
811 conf.env.define_key = []
780 conf.SAMBA_CROSS_CHECK_COMPLETE()812 conf.SAMBA_CROSS_CHECK_COMPLETE()
781813
782814
@@ -863,9 +895,6 @@ def CHECK_CC_ENV(conf):
863 The build farm sometimes puts a space at the start"""895 The build farm sometimes puts a space at the start"""
864 if os.environ.get('CC'):896 if os.environ.get('CC'):
865 conf.env.CC = TO_LIST(os.environ.get('CC'))897 conf.env.CC = TO_LIST(os.environ.get('CC'))
866 if len(conf.env.CC) == 1:
867 # make for nicer logs if just a single command
868 conf.env.CC = conf.env.CC[0]
869898
870899
871@conf900@conf
@@ -875,7 +904,7 @@ def SETUP_CONFIGURE_CACHE(conf, enable):
875 # when -C is chosen, we will use a private cache and will904 # when -C is chosen, we will use a private cache and will
876 # not look into system includes. This roughtly matches what905 # not look into system includes. This roughtly matches what
877 # autoconf does with -C906 # autoconf does with -C
878 cache_path = os.path.join(conf.blddir, '.confcache')907 cache_path = os.path.join(conf.bldnode.abspath(), '.confcache')
879 mkdir_p(cache_path)908 mkdir_p(cache_path)
880 Options.cache_global = os.environ['WAFCACHE'] = cache_path909 Options.cache_global = os.environ['WAFCACHE'] = cache_path
881 else:910 else:
@@ -899,6 +928,3 @@ def SAMBA_CHECK_UNDEFINED_SYMBOL_FLAGS(conf):
899 if conf.CHECK_LDFLAGS(['-undefined', 'dynamic_lookup']):928 if conf.CHECK_LDFLAGS(['-undefined', 'dynamic_lookup']):
900 conf.env.undefined_ignore_ldflags = ['-undefined', 'dynamic_lookup']929 conf.env.undefined_ignore_ldflags = ['-undefined', 'dynamic_lookup']
901930
902@conf
903def CHECK_CFG(self, *k, **kw):
904 return self.check_cfg(*k, **kw)
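The new SAMBA_CONFIG_H hunk above probes -fstack-protector-strong first and falls back to -fstack-protector, keeping only the first flag the compiler accepts. A rough standalone sketch of that fallback idea, using a plain compiler invocation instead of the waf conf.check()/ADD_CFLAGS() machinery (the compiler name is an assumption for illustration):

# Standalone sketch of "use the first supported hardening flag" -- not the
# wafsamba implementation, which goes through conf.check().
import os
import subprocess
import tempfile

def first_supported_flag(cc, candidates):
    src = 'int main(void) { return 0; }\n'
    for flag in candidates:
        with tempfile.TemporaryDirectory() as tmp:
            c_file = os.path.join(tmp, 'test.c')
            with open(c_file, 'w') as f:
                f.write(src)
            # -Werror mirrors the strict cflags used in the diff
            rc = subprocess.call([cc, '-Werror', flag, '-c', c_file,
                                  '-o', os.path.join(tmp, 'test.o')])
        if rc == 0:
            return flag
    return None

# Assumes a 'cc' binary on PATH; prints the first flag it accepts, if any.
print(first_supported_flag('cc', ['-fstack-protector-strong',
                                  '-fstack-protector']))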
diff --git a/buildtools/wafsamba/samba_autoproto.py b/buildtools/wafsamba/samba_autoproto.py
index b2b5233..ace434f 100644
--- a/buildtools/wafsamba/samba_autoproto.py
+++ b/buildtools/wafsamba/samba_autoproto.py
@@ -1,13 +1,13 @@
 # waf build tool for building automatic prototypes from C source
 
 import os
-import Build
+from waflib import Build
 from samba_utils import SET_TARGET_TYPE, os_path_relpath
 
 def SAMBA_AUTOPROTO(bld, header, source):
     '''rule for samba prototype generation'''
     bld.SET_BUILD_GROUP('prototypes')
-    relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath())
+    relpath = os_path_relpath(bld.path.abspath(), bld.srcnode.abspath())
     name = os.path.join(relpath, header)
     SET_TARGET_TYPE(bld, name, 'PROTOTYPE')
     t = bld(
@@ -16,7 +16,7 @@ def SAMBA_AUTOPROTO(bld, header, source):
         target = header,
         update_outputs=True,
         ext_out='.c',
-        before ='cc',
+        before ='c',
         rule = '${PERL} "${SCRIPT}/mkproto.pl" --srcdir=.. --builddir=. --public=/dev/null --private="${TGT}" ${SRC}'
         )
     t.env.SCRIPT = os.path.join(bld.srcnode.abspath(), 'source4/script')
diff --git a/buildtools/wafsamba/samba_bundled.py b/buildtools/wafsamba/samba_bundled.py
index 253d604..60ce7da 100644
--- a/buildtools/wafsamba/samba_bundled.py
+++ b/buildtools/wafsamba/samba_bundled.py
@@ -1,9 +1,9 @@
1# functions to support bundled libraries1# functions to support bundled libraries
22
3import sys3import sys
4import Build, Options, Logs4from waflib import Build, Options, Logs
5from Configure import conf5from waflib.Configure import conf
6from samba_utils import TO_LIST6from wafsamba import samba_utils
77
8def PRIVATE_NAME(bld, name, private_extension, private_library):8def PRIVATE_NAME(bld, name, private_extension, private_library):
9 '''possibly rename a library to include a bundled extension'''9 '''possibly rename a library to include a bundled extension'''
@@ -51,19 +51,19 @@ Build.BuildContext.BUILTIN_LIBRARY = BUILTIN_LIBRARY
5151
52def BUILTIN_DEFAULT(opt, builtins):52def BUILTIN_DEFAULT(opt, builtins):
53 '''set a comma separated default list of builtin libraries for this package'''53 '''set a comma separated default list of builtin libraries for this package'''
54 if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options:54 if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options.__dict__:
55 return55 return
56 Options.options['BUILTIN_LIBRARIES_DEFAULT'] = builtins56 Options.options.__dict__['BUILTIN_LIBRARIES_DEFAULT'] = builtins
57Options.Handler.BUILTIN_DEFAULT = BUILTIN_DEFAULT57Options.OptionsContext.BUILTIN_DEFAULT = BUILTIN_DEFAULT
5858
5959
60def PRIVATE_EXTENSION_DEFAULT(opt, extension, noextension=''):60def PRIVATE_EXTENSION_DEFAULT(opt, extension, noextension=''):
61 '''set a default private library extension'''61 '''set a default private library extension'''
62 if 'PRIVATE_EXTENSION_DEFAULT' in Options.options:62 if 'PRIVATE_EXTENSION_DEFAULT' in Options.options.__dict__:
63 return63 return
64 Options.options['PRIVATE_EXTENSION_DEFAULT'] = extension64 Options.options.__dict__['PRIVATE_EXTENSION_DEFAULT'] = extension
65 Options.options['PRIVATE_EXTENSION_EXCEPTION'] = noextension65 Options.options.__dict__['PRIVATE_EXTENSION_EXCEPTION'] = noextension
66Options.Handler.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT66Options.OptionsContext.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT
6767
6868
69def minimum_library_version(conf, libname, default):69def minimum_library_version(conf, libname, default):
@@ -139,7 +139,7 @@ def CHECK_BUNDLED_SYSTEM(conf, libname, minversion='0.0.0',
139 # We always do a logic validation of 'onlyif' first139 # We always do a logic validation of 'onlyif' first
140 missing = []140 missing = []
141 if onlyif:141 if onlyif:
142 for l in TO_LIST(onlyif):142 for l in samba_utils.TO_LIST(onlyif):
143 f = 'FOUND_SYSTEMLIB_%s' % l143 f = 'FOUND_SYSTEMLIB_%s' % l
144 if not f in conf.env:144 if not f in conf.env:
145 Logs.error('ERROR: CHECK_BUNDLED_SYSTEM(%s) - ' % (libname) +145 Logs.error('ERROR: CHECK_BUNDLED_SYSTEM(%s) - ' % (libname) +
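The Options.options.__dict__ accesses above replace plain dictionary-style use: in waf 2.x, Options.options is an optparse-style Values object, which is not a mapping, so membership tests and item assignment have to go through its attribute dictionary. A small sketch with plain optparse (illustrative only, outside waf):

# Illustrative only: optparse.Values stands in for waf's Options.options.
from optparse import Values

options = Values()

# 'X' in options would raise TypeError (Values is not iterable), so the
# wafsamba code tests and assigns via __dict__ instead.
if 'BUILTIN_LIBRARIES_DEFAULT' not in options.__dict__:
    options.__dict__['BUILTIN_LIBRARIES_DEFAULT'] = 'replace'

print(options.BUILTIN_LIBRARIES_DEFAULT)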
diff --git a/buildtools/wafsamba/samba_conftests.py b/buildtools/wafsamba/samba_conftests.py
index b52727b..ef632ba 100644
--- a/buildtools/wafsamba/samba_conftests.py
+++ b/buildtools/wafsamba/samba_conftests.py
@@ -2,34 +2,35 @@
2# to test for commonly needed configuration options2# to test for commonly needed configuration options
33
4import os, shutil, re4import os, shutil, re
5import Build, Configure, Utils, Options, Logs5from waflib import Build, Configure, Utils, Options, Logs, Errors
6from Configure import conf6from waflib.Configure import conf
7from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH7from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH, get_string
88
99
10def add_option(self, *k, **kw):10def add_option(self, *k, **kw):
11 '''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests'''11 '''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests'''
12 Options.OptionsContext.parser = self
12 match = kw.get('match', [])13 match = kw.get('match', [])
13 if match:14 if match:
14 del kw['match']15 del kw['match']
15 opt = self.parser.add_option(*k, **kw)16 opt = self.parser.add_option(*k, **kw)
16 opt.match = match17 opt.match = match
17 return opt18 return opt
18Options.Handler.add_option = add_option19Options.OptionsContext.add_option = add_option
1920
20@conf21@conf
21def check(self, *k, **kw):22def check(self, *k, **kw):
22 '''Override the waf defaults to inject --with-directory options'''23 '''Override the waf defaults to inject --with-directory options'''
2324
24 if not 'env' in kw:25 if not 'env' in kw:
25 kw['env'] = self.env.copy()26 kw['env'] = self.env.derive()
2627
27 # match the configuration test with specific options, for example:28 # match the configuration test with specific options, for example:
28 # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"29 # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"
29 additional_dirs = []30 additional_dirs = []
30 if 'msg' in kw:31 if 'msg' in kw:
31 msg = kw['msg']32 msg = kw['msg']
32 for x in Options.Handler.parser.parser.option_list:33 for x in Options.OptionsContext.parser.parser.option_list:
33 if getattr(x, 'match', None) and msg in x.match:34 if getattr(x, 'match', None) and msg in x.match:
34 d = getattr(Options.options, x.dest, '')35 d = getattr(Options.options, x.dest, '')
35 if d:36 if d:
@@ -46,12 +47,12 @@ def check(self, *k, **kw):
46 add_options_dir(additional_dirs, kw['env'])47 add_options_dir(additional_dirs, kw['env'])
4748
48 self.validate_c(kw)49 self.validate_c(kw)
49 self.check_message_1(kw['msg'])50 self.start_msg(kw['msg'])
50 ret = None51 ret = None
51 try:52 try:
52 ret = self.run_c_code(*k, **kw)53 ret = self.run_c_code(*k, **kw)
53 except Configure.ConfigurationError as e:54 except Configure.ConfigurationError as e:
54 self.check_message_2(kw['errmsg'], 'YELLOW')55 self.end_msg(kw['errmsg'], 'YELLOW')
55 if 'mandatory' in kw and kw['mandatory']:56 if 'mandatory' in kw and kw['mandatory']:
56 if Logs.verbose > 1:57 if Logs.verbose > 1:
57 raise58 raise
@@ -59,7 +60,7 @@ def check(self, *k, **kw):
59 self.fatal('the configuration failed (see %r)' % self.log.name)60 self.fatal('the configuration failed (see %r)' % self.log.name)
60 else:61 else:
61 kw['success'] = ret62 kw['success'] = ret
62 self.check_message_2(self.ret_msg(kw['okmsg'], kw))63 self.end_msg(self.ret_msg(kw['okmsg'], kw))
6364
64 # success! keep the CPPPATH/LIBPATH65 # success! keep the CPPPATH/LIBPATH
65 add_options_dir(additional_dirs, self.env)66 add_options_dir(additional_dirs, self.env)
@@ -85,7 +86,7 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'):
85 '''see what we need for largefile support'''86 '''see what we need for largefile support'''
86 getconf_cflags = conf.CHECK_COMMAND(['getconf', 'LFS_CFLAGS']);87 getconf_cflags = conf.CHECK_COMMAND(['getconf', 'LFS_CFLAGS']);
87 if getconf_cflags is not False:88 if getconf_cflags is not False:
88 if (conf.CHECK_CODE('return !(sizeof(off_t) >= 8)',89 if (conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
89 define='WORKING_GETCONF_LFS_CFLAGS',90 define='WORKING_GETCONF_LFS_CFLAGS',
90 execute=True,91 execute=True,
91 cflags=getconf_cflags,92 cflags=getconf_cflags,
@@ -100,13 +101,13 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'):
100 else:101 else:
101 conf.DEFINE(flag_split[0], flag_split[1])102 conf.DEFINE(flag_split[0], flag_split[1])
102103
103 if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)',104 if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
104 define,105 define,
105 execute=True,106 execute=True,
106 msg='Checking for large file support without additional flags'):107 msg='Checking for large file support without additional flags'):
107 return True108 return True
108109
109 if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)',110 if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
110 define,111 define,
111 execute=True,112 execute=True,
112 cflags='-D_FILE_OFFSET_BITS=64',113 cflags='-D_FILE_OFFSET_BITS=64',
@@ -114,7 +115,7 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'):
114 conf.DEFINE('_FILE_OFFSET_BITS', 64)115 conf.DEFINE('_FILE_OFFSET_BITS', 64)
115 return True116 return True
116117
117 if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)',118 if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1',
118 define,119 define,
119 execute=True,120 execute=True,
120 cflags='-D_LARGE_FILES',121 cflags='-D_LARGE_FILES',
@@ -162,7 +163,7 @@ def find_config_dir(conf):
162 '''find a directory to run tests in'''163 '''find a directory to run tests in'''
163 k = 0164 k = 0
164 while k < 10000:165 while k < 10000:
165 dir = os.path.join(conf.blddir, '.conf_check_%d' % k)166 dir = os.path.join(conf.bldnode.abspath(), '.conf_check_%d' % k)
166 try:167 try:
167 shutil.rmtree(dir)168 shutil.rmtree(dir)
168 except OSError:169 except OSError:
@@ -257,7 +258,8 @@ int foo(int v) {
257 environ[0] = 1;258 environ[0] = 1;
258 ldb_module = PyImport_ImportModule("ldb");259 ldb_module = PyImport_ImportModule("ldb");
259 return v * 2;260 return v * 2;
260}'''261}
262'''
261 return conf.check(features='c cshlib',uselib='PYEMBED',fragment=snip,msg=msg, mandatory=False)263 return conf.check(features='c cshlib',uselib='PYEMBED',fragment=snip,msg=msg, mandatory=False)
262264
263# this one is quite complex, and should probably be broken up265# this one is quite complex, and should probably be broken up
@@ -337,7 +339,8 @@ def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None):
337339
338 # we need to run the program, try to get its result340 # we need to run the program, try to get its result
339 args = conf.SAMBA_CROSS_ARGS(msg=msg)341 args = conf.SAMBA_CROSS_ARGS(msg=msg)
340 proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)342 proc = Utils.subprocess.Popen([lastprog] + args,
343 stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE)
341 (out, err) = proc.communicate()344 (out, err) = proc.communicate()
342 w = conf.log.write345 w = conf.log.write
343 w(str(out))346 w(str(out))
@@ -364,7 +367,7 @@ def CHECK_PERL_MANPAGE(conf, msg=None, section=None):
364 else:367 else:
365 msg = "perl manpage generation"368 msg = "perl manpage generation"
366369
367 conf.check_message_1(msg)370 conf.start_msg(msg)
368371
369 dir = find_config_dir(conf)372 dir = find_config_dir(conf)
370373
@@ -381,28 +384,28 @@ WriteMakefile(
381""")384""")
382 back = os.path.abspath('.')385 back = os.path.abspath('.')
383 os.chdir(bdir)386 os.chdir(bdir)
384 proc = Utils.pproc.Popen(['perl', 'Makefile.PL'],387 proc = Utils.subprocess.Popen(['perl', 'Makefile.PL'],
385 stdout=Utils.pproc.PIPE,388 stdout=Utils.subprocess.PIPE,
386 stderr=Utils.pproc.PIPE)389 stderr=Utils.subprocess.PIPE)
387 (out, err) = proc.communicate()390 (out, err) = proc.communicate()
388 os.chdir(back)391 os.chdir(back)
389392
390 ret = (proc.returncode == 0)393 ret = (proc.returncode == 0)
391 if not ret:394 if not ret:
392 conf.check_message_2('not found', color='YELLOW')395 conf.end_msg('not found', color='YELLOW')
393 return396 return
394397
395 if section:398 if section:
396 man = Utils.readf(os.path.join(bdir,'Makefile'))399 man = Utils.readf(os.path.join(bdir,'Makefile'))
397 m = re.search('MAN%sEXT\s+=\s+(\w+)' % section, man)400 m = re.search('MAN%sEXT\s+=\s+(\w+)' % section, man)
398 if not m:401 if not m:
399 conf.check_message_2('not found', color='YELLOW')402 conf.end_msg('not found', color='YELLOW')
400 return403 return
401 ext = m.group(1)404 ext = m.group(1)
402 conf.check_message_2(ext)405 conf.end_msg(ext)
403 return ext406 return ext
404407
405 conf.check_message_2('ok')408 conf.end_msg('ok')
406 return True409 return True
407410
408411
@@ -416,7 +419,7 @@ def CHECK_COMMAND(conf, cmd, msg=None, define=None, on_target=True, boolean=Fals
416 if on_target:419 if on_target:
417 cmd.extend(conf.SAMBA_CROSS_ARGS(msg=msg))420 cmd.extend(conf.SAMBA_CROSS_ARGS(msg=msg))
418 try:421 try:
419 ret = Utils.cmd_output(cmd)422 ret = get_string(Utils.cmd_output(cmd))
420 except:423 except:
421 conf.COMPOUND_END(False)424 conf.COMPOUND_END(False)
422 return False425 return False
@@ -461,7 +464,7 @@ def CHECK_INLINE(conf):
461 ret = conf.CHECK_CODE('''464 ret = conf.CHECK_CODE('''
462 typedef int foo_t;465 typedef int foo_t;
463 static %s foo_t static_foo () {return 0; }466 static %s foo_t static_foo () {return 0; }
464 %s foo_t foo () {return 0; }''' % (i, i),467 %s foo_t foo () {return 0; }\n''' % (i, i),
465 define='INLINE_MACRO',468 define='INLINE_MACRO',
466 addmain=False,469 addmain=False,
467 link=False)470 link=False)
@@ -486,13 +489,13 @@ def CHECK_XSLTPROC_MANPAGES(conf):
486 return False489 return False
487490
488 s='http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl'491 s='http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl'
489 conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.XSLTPROC, s),492 conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.get_flat('XSLTPROC'), s),
490 msg='Checking for stylesheet %s' % s,493 msg='Checking for stylesheet %s' % s,
491 define='XSLTPROC_MANPAGES', on_target=False,494 define='XSLTPROC_MANPAGES', on_target=False,
492 boolean=True)495 boolean=True)
493 if not conf.CONFIG_SET('XSLTPROC_MANPAGES'):496 if not conf.CONFIG_SET('XSLTPROC_MANPAGES'):
494 print "A local copy of the docbook.xsl wasn't found on your system" \497 print("A local copy of the docbook.xsl wasn't found on your system" \
495 " consider installing package like docbook-xsl"498 " consider installing package like docbook-xsl")
496499
497#500#
498# Determine the standard libpath for the used compiler,501# Determine the standard libpath for the used compiler,
@@ -506,12 +509,12 @@ def CHECK_STANDARD_LIBPATH(conf):
506 # at least gcc and clang support this:509 # at least gcc and clang support this:
507 try:510 try:
508 cmd = conf.env.CC + ['-print-search-dirs']511 cmd = conf.env.CC + ['-print-search-dirs']
509 out = Utils.cmd_output(cmd).split('\n')512 out = get_string(Utils.cmd_output(cmd)).split('\n')
510 except ValueError:513 except ValueError:
511 # option not supported by compiler - use a standard list of directories514 # option not supported by compiler - use a standard list of directories
512 dirlist = [ '/usr/lib', '/usr/lib64' ]515 dirlist = [ '/usr/lib', '/usr/lib64' ]
513 except:516 except:
514 raise Utils.WafError('Unexpected error running "%s"' % (cmd))517 raise Errors.WafError('Unexpected error running "%s"' % (cmd))
515 else:518 else:
516 dirlist = []519 dirlist = []
517 for line in out:520 for line in out:
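Several hunks above swap Utils.pproc for Utils.subprocess and wrap command output in get_string(). The underlying issue is that pipes return bytes on Python 3; a minimal illustration of what such a wrapper has to do (a sketch only, not the samba_utils.get_string() implementation):

# Sketch of the bytes-vs-str handling behind get_string(); not wafsamba code.
import subprocess

proc = subprocess.Popen(['echo', 'hello'],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()

# On Python 3 'out' is bytes; decode before splitting or regex matching.
if isinstance(out, bytes):
    out = out.decode('utf-8', 'replace')

print(out.split('\n')[0])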
diff --git a/buildtools/wafsamba/samba_cross.py b/buildtools/wafsamba/samba_cross.py
index b8f2000..8863c2c 100644
--- a/buildtools/wafsamba/samba_cross.py
+++ b/buildtools/wafsamba/samba_cross.py
@@ -1,8 +1,9 @@
1# functions for handling cross-compilation1# functions for handling cross-compilation
22
3import os, sys, re, shlex3import os, sys, re, shlex
4import Utils, Logs, Options4from waflib import Utils, Logs, Options, Errors, Context
5from Configure import conf5from waflib.Configure import conf
6from wafsamba import samba_utils
67
7real_Popen = None8real_Popen = None
89
@@ -81,12 +82,12 @@ def cross_answer(ca_file, msg):
81 f.close()82 f.close()
82 return (int(m.group(1)), m.group(2))83 return (int(m.group(1)), m.group(2))
83 else:84 else:
84 raise Utils.WafError("Bad answer format '%s' in %s" % (line, ca_file))85 raise Errors.WafError("Bad answer format '%s' in %s" % (line, ca_file))
85 f.close()86 f.close()
86 return ANSWER_UNKNOWN87 return ANSWER_UNKNOWN
8788
8889
89class cross_Popen(Utils.pproc.Popen):90class cross_Popen(Utils.subprocess.Popen):
90 '''cross-compilation wrapper for Popen'''91 '''cross-compilation wrapper for Popen'''
91 def __init__(*k, **kw):92 def __init__(*k, **kw):
92 (obj, args) = k93 (obj, args) = k
@@ -118,10 +119,10 @@ class cross_Popen(Utils.pproc.Popen):
118 newargs.extend(args[0:i])119 newargs.extend(args[0:i])
119 if use_answers:120 if use_answers:
120 p = real_Popen(newargs,121 p = real_Popen(newargs,
121 stdout=Utils.pproc.PIPE,122 stdout=Utils.subprocess.PIPE,
122 stderr=Utils.pproc.PIPE)123 stderr=Utils.subprocess.PIPE)
123 ce_out, ce_err = p.communicate()124 ce_out, ce_err = p.communicate()
124 ans = (p.returncode, ce_out)125 ans = (p.returncode, samba_utils.get_string(ce_out))
125 add_answer(ca_file, msg, ans)126 add_answer(ca_file, msg, ans)
126 else:127 else:
127 args = newargs128 args = newargs
@@ -144,8 +145,8 @@ def SAMBA_CROSS_ARGS(conf, msg=None):
144145
145 global real_Popen146 global real_Popen
146 if real_Popen is None:147 if real_Popen is None:
147 real_Popen = Utils.pproc.Popen148 real_Popen = Utils.subprocess.Popen
148 Utils.pproc.Popen = cross_Popen149 Utils.subprocess.Popen = cross_Popen
149150
150 ret = []151 ret = []
151152
@@ -154,11 +155,11 @@ def SAMBA_CROSS_ARGS(conf, msg=None):
154155
155 if conf.env.CROSS_ANSWERS:156 if conf.env.CROSS_ANSWERS:
156 if msg is None:157 if msg is None:
157 raise Utils.WafError("Cannot have NULL msg in cross-answers")158 raise Errors.WafError("Cannot have NULL msg in cross-answers")
158 ret.extend(['--cross-answers', os.path.join(Options.launch_dir, conf.env.CROSS_ANSWERS), msg])159 ret.extend(['--cross-answers', os.path.join(Context.launch_dir, conf.env.CROSS_ANSWERS), msg])
159160
160 if ret == []:161 if ret == []:
161 raise Utils.WafError("Cannot cross-compile without either --cross-execute or --cross-answers")162 raise Errors.WafError("Cannot cross-compile without either --cross-execute or --cross-answers")
162163
163 return ret164 return ret
164165
@@ -167,5 +168,5 @@ def SAMBA_CROSS_CHECK_COMPLETE(conf):
167 '''check if we have some unanswered questions'''168 '''check if we have some unanswered questions'''
168 global cross_answers_incomplete169 global cross_answers_incomplete
169 if conf.env.CROSS_COMPILE and cross_answers_incomplete:170 if conf.env.CROSS_COMPILE and cross_answers_incomplete:
170 raise Utils.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS)171 raise Errors.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS)
171 return True172 return True
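The samba_cross changes keep the existing trick of swapping subprocess.Popen for a wrapper class while remembering the real one. A stripped-down sketch of that pattern (the wrapper here only logs; the real cross_Popen consults cross-execute and cross-answers):

# Pattern sketch only -- cross_Popen does much more than this.
import subprocess

real_Popen = subprocess.Popen

class logging_Popen(subprocess.Popen):
    def __init__(self, *args, **kwargs):
        print('running: %r' % (args[0],))
        real_Popen.__init__(self, *args, **kwargs)

# from here on, anything in this process using subprocess.Popen is wrapped
subprocess.Popen = logging_Popen

out = subprocess.Popen(['echo', 'hi'],
                       stdout=subprocess.PIPE).communicate()[0]
print(out)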
diff --git a/buildtools/wafsamba/samba_deps.py b/buildtools/wafsamba/samba_deps.py
index 978a5e9..f8c3880 100644
--- a/buildtools/wafsamba/samba_deps.py
+++ b/buildtools/wafsamba/samba_deps.py
@@ -2,9 +2,10 @@
22
3import os, sys, re, time3import os, sys, re, time
44
5import Build, Environment, Options, Logs, Utils5from waflib import Build, Options, Logs, Utils, Errors
6from Logs import debug6from waflib.Logs import debug
7from Configure import conf7from waflib.Configure import conf
8from waflib import ConfigSet
89
9from samba_bundled import BUILTIN_LIBRARY10from samba_bundled import BUILTIN_LIBRARY
10from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list, os_path_relpath11from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list, os_path_relpath
@@ -85,7 +86,7 @@ def build_dependencies(self):
85 # extra link flags from pkg_config86 # extra link flags from pkg_config
86 libs = self.final_syslibs.copy()87 libs = self.final_syslibs.copy()
8788
88 (ccflags, ldflags, cpppath) = library_flags(self, list(libs))89 (cflags, ldflags, cpppath) = library_flags(self, list(libs))
89 new_ldflags = getattr(self, 'samba_ldflags', [])[:]90 new_ldflags = getattr(self, 'samba_ldflags', [])[:]
90 new_ldflags.extend(ldflags)91 new_ldflags.extend(ldflags)
91 self.ldflags = new_ldflags92 self.ldflags = new_ldflags
@@ -102,7 +103,7 @@ def build_dependencies(self):
102 self.sname, self.uselib, self.uselib_local, self.add_objects)103 self.sname, self.uselib, self.uselib_local, self.add_objects)
103104
104 if self.samba_type in ['SUBSYSTEM']:105 if self.samba_type in ['SUBSYSTEM']:
105 # this is needed for the ccflags of libs that come from pkg_config106 # this is needed for the cflags of libs that come from pkg_config
106 self.uselib = list(self.final_syslibs)107 self.uselib = list(self.final_syslibs)
107 self.uselib.extend(list(self.direct_syslibs))108 self.uselib.extend(list(self.direct_syslibs))
108 for lib in self.final_libs:109 for lib in self.final_libs:
@@ -235,7 +236,7 @@ def add_init_functions(self):
235 if sentinel == 'NULL':236 if sentinel == 'NULL':
236 proto = "extern void __%s_dummy_module_proto(void)" % (sname)237 proto = "extern void __%s_dummy_module_proto(void)" % (sname)
237 cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto))238 cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto))
238 self.ccflags = cflags239 self.cflags = cflags
239 return240 return
240241
241 for m in modules:242 for m in modules:
@@ -257,7 +258,7 @@ def add_init_functions(self):
257 proto += '_MODULE_PROTO(%s)' % f258 proto += '_MODULE_PROTO(%s)' % f
258 proto += "extern void __%s_dummy_module_proto(void)" % (m)259 proto += "extern void __%s_dummy_module_proto(void)" % (m)
259 cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto))260 cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto))
260 self.ccflags = cflags261 self.cflags = cflags
261262
262263
263def check_duplicate_sources(bld, tgt_list):264def check_duplicate_sources(bld, tgt_list):
@@ -271,6 +272,9 @@ def check_duplicate_sources(bld, tgt_list):
271 tpath = os.path.normpath(os_path_relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default'))272 tpath = os.path.normpath(os_path_relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default'))
272 obj_sources = set()273 obj_sources = set()
273 for s in source_list:274 for s in source_list:
275 if not isinstance(s, str):
276 print('strange path in check_duplicate_sources %r' % s)
277 s = s.abspath()
274 p = os.path.normpath(os.path.join(tpath, s))278 p = os.path.normpath(os.path.join(tpath, s))
275 if p in obj_sources:279 if p in obj_sources:
276 Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname))280 Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname))
@@ -299,7 +303,7 @@ def check_duplicate_sources(bld, tgt_list):
299 Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))303 Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))
300 for tname in subsystems[s]:304 for tname in subsystems[s]:
301 if len(subsystems[s][tname]) > 1:305 if len(subsystems[s][tname]) > 1:
302 raise Utils.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))306 raise Errors.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
303307
304 return True308 return True
305309
@@ -372,7 +376,7 @@ def add_samba_attributes(bld, tgt_list):
372 t.samba_abspath = t.path.abspath(bld.env)376 t.samba_abspath = t.path.abspath(bld.env)
373 t.samba_deps_extended = t.samba_deps[:]377 t.samba_deps_extended = t.samba_deps[:]
374 t.samba_includes_extended = TO_LIST(t.samba_includes)[:]378 t.samba_includes_extended = TO_LIST(t.samba_includes)[:]
375 t.ccflags = getattr(t, 'samba_cflags', '')379 t.cflags = getattr(t, 'samba_cflags', '')
376380
377def replace_grouping_libraries(bld, tgt_list):381def replace_grouping_libraries(bld, tgt_list):
378 '''replace dependencies based on grouping libraries382 '''replace dependencies based on grouping libraries
@@ -715,6 +719,11 @@ def reduce_objects(bld, tgt_list):
715 if t.sname in rely_on:719 if t.sname in rely_on:
716 dup = dup.difference(rely_on[t.sname])720 dup = dup.difference(rely_on[t.sname])
717 if dup:721 if dup:
722 # Do not remove duplicates of BUILTINS
723 d = next(iter(dup))
724 if BUILTIN_LIBRARY(bld, d):
725 continue
726
718 debug('deps: removing dups from %s of type %s: %s also in %s %s',727 debug('deps: removing dups from %s of type %s: %s also in %s %s',
719 t.sname, t.samba_type, dup, t2.samba_type, l)728 t.sname, t.samba_type, dup, t2.samba_type, l)
720 new = new.difference(dup)729 new = new.difference(dup)
@@ -951,7 +960,7 @@ savedeps_inputs = ['samba_deps', 'samba_includes', 'local_include', 'local_incl
951 'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols',960 'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols',
952 'use_global_deps', 'global_include' ]961 'use_global_deps', 'global_include' ]
953savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes',962savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes',
954 'ccflags', 'ldflags', 'samba_deps_extended', 'final_libs']963 'cflags', 'ldflags', 'samba_deps_extended', 'final_libs']
955savedeps_outenv = ['INC_PATHS']964savedeps_outenv = ['INC_PATHS']
956savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ]965savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ]
957savedeps_caches = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS']966savedeps_caches = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS']
@@ -960,7 +969,7 @@ savedeps_files = ['buildtools/wafsamba/samba_deps.py']
960def save_samba_deps(bld, tgt_list):969def save_samba_deps(bld, tgt_list):
961 '''save the dependency calculations between builds, to make970 '''save the dependency calculations between builds, to make
962 further builds faster'''971 further builds faster'''
963 denv = Environment.Environment()972 denv = ConfigSet.ConfigSet()
964973
965 denv.version = savedeps_version974 denv.version = savedeps_version
966 denv.savedeps_inputs = savedeps_inputs975 denv.savedeps_inputs = savedeps_inputs
@@ -1007,15 +1016,15 @@ def save_samba_deps(bld, tgt_list):
1007 if tdeps != {}:1016 if tdeps != {}:
1008 denv.outenv[t.sname] = tdeps1017 denv.outenv[t.sname] = tdeps
10091018
1010 depsfile = os.path.join(bld.bdir, "sambadeps")1019 depsfile = os.path.join(bld.cache_dir, "sambadeps")
1011 denv.store_fast(depsfile)1020 denv.store_fast(depsfile)
10121021
10131022
10141023
1015def load_samba_deps(bld, tgt_list):1024def load_samba_deps(bld, tgt_list):
1016 '''load a previous set of build dependencies if possible'''1025 '''load a previous set of build dependencies if possible'''
1017 depsfile = os.path.join(bld.bdir, "sambadeps")1026 depsfile = os.path.join(bld.cache_dir, "sambadeps")
1018 denv = Environment.Environment()1027 denv = ConfigSet.ConfigSet()
1019 try:1028 try:
1020 debug('deps: checking saved dependencies')1029 debug('deps: checking saved dependencies')
1021 denv.load_fast(depsfile)1030 denv.load_fast(depsfile)
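The dependency cache above now uses waflib.ConfigSet instead of the removed Environment class. A hedged sketch of basic ConfigSet persistence, assuming the bundled third_party/waf/waflib is importable; the store_fast()/load_fast() helpers used in the diff are wafsamba additions and are not shown here:

# Sketch of plain waflib.ConfigSet usage; the real code stores the samba
# dependency tables via samba-specific store_fast()/load_fast() helpers.
import os
import tempfile
from waflib import ConfigSet

denv = ConfigSet.ConfigSet()
denv.version = 50
denv.input = {'tdb': ['common/tdb.c']}

path = os.path.join(tempfile.gettempdir(), 'sambadeps.example')
denv.store(path)

check = ConfigSet.ConfigSet()
check.load(path)
print(check.version, check.input)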
diff --git a/buildtools/wafsamba/samba_dist.py b/buildtools/wafsamba/samba_dist.py
index 8d51632..6af7bb4 100644
--- a/buildtools/wafsamba/samba_dist.py
+++ b/buildtools/wafsamba/samba_dist.py
@@ -2,13 +2,41 @@
2# uses git ls-files to get file lists2# uses git ls-files to get file lists
33
4import os, sys, tarfile4import os, sys, tarfile
5import Utils, Scripting, Logs, Options5from waflib import Utils, Scripting, Logs, Options
6from Configure import conf6from waflib.Configure import conf
7from samba_utils import os_path_relpath7from samba_utils import os_path_relpath, get_string
8from waflib import Context
89
9dist_dirs = None10dist_dirs = None
10dist_files = None11dist_files = None
11dist_blacklist = ""12dist_blacklist = ""
13dist_archive = None
14
15class Dist(Context.Context):
16 # TODO remove
17 cmd = 'dist'
18 fun = 'dist'
19 def execute(self):
20 Context.g_module.dist()
21
22class DistCheck(Scripting.DistCheck):
23 fun = 'distcheck'
24 cmd = 'distcheck'
25 def execute(self):
26 Options.options.distcheck_args = ''
27 if Context.g_module.distcheck is Scripting.distcheck:
28 # default
29 Context.g_module.distcheck(self)
30 else:
31 Context.g_module.distcheck()
32 Context.g_module.dist()
33 self.check()
34 def get_arch_name(self):
35 global dist_archive
36 return dist_archive
37 def make_distcheck_cmd(self, tmpdir):
38 waf = os.path.abspath(sys.argv[0])
39 return [sys.executable, waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir]
1240
13def add_symlink(tar, fname, abspath, basedir):41def add_symlink(tar, fname, abspath, basedir):
14 '''handle symlinks to directories that may move during packaging'''42 '''handle symlinks to directories that may move during packaging'''
@@ -69,7 +97,7 @@ def add_tarfile(tar, fname, abspath, basedir):
69 tinfo.gid = 097 tinfo.gid = 0
70 tinfo.uname = 'root'98 tinfo.uname = 'root'
71 tinfo.gname = 'root'99 tinfo.gname = 'root'
72 fh = open(abspath)100 fh = open(abspath, "rb")
73 tar.addfile(tinfo, fileobj=fh)101 tar.addfile(tinfo, fileobj=fh)
74 fh.close()102 fh.close()
75103
@@ -91,7 +119,7 @@ def vcs_dir_contents(path):
91 repo = os.path.dirname(repo)119 repo = os.path.dirname(repo)
92 if repo == "/":120 if repo == "/":
93 raise Exception("unsupported or no vcs for %s" % path)121 raise Exception("unsupported or no vcs for %s" % path)
94 return Utils.cmd_output(ls_files_cmd, cwd=cwd, env=env).split()122 return get_string(Utils.cmd_output(ls_files_cmd, cwd=cwd, env=env)).split('\n')
95123
96124
97def dist(appname='', version=''):125def dist(appname='', version=''):
@@ -136,12 +164,14 @@ def dist(appname='', version=''):
136164
137 if not isinstance(appname, str) or not appname:165 if not isinstance(appname, str) or not appname:
138 # this copes with a mismatch in the calling arguments for dist()166 # this copes with a mismatch in the calling arguments for dist()
139 appname = Utils.g_module.APPNAME167 appname = Context.g_module.APPNAME
140 version = Utils.g_module.VERSION168 version = Context.g_module.VERSION
141 if not version:169 if not version:
142 version = Utils.g_module.VERSION170 version = Context.g_module.VERSION
143171
144 srcdir = os.path.normpath(os.path.join(os.path.dirname(Utils.g_module.root_path), Utils.g_module.srcdir))172 srcdir = os.path.normpath(
173 os.path.join(os.path.dirname(Context.g_module.root_path),
174 Context.g_module.top))
145175
146 if not dist_dirs:176 if not dist_dirs:
147 Logs.error('You must use samba_dist.DIST_DIRS() to set which directories to package')177 Logs.error('You must use samba_dist.DIST_DIRS() to set which directories to package')
@@ -218,6 +248,9 @@ def dist(appname='', version=''):
218 else:248 else:
219 Logs.info('Created %s' % dist_name)249 Logs.info('Created %s' % dist_name)
220250
251 # TODO use the ctx object instead
252 global dist_archive
253 dist_archive = dist_name
221 return dist_name254 return dist_name
222255
223256
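The open(abspath, "rb") change in add_tarfile above matters because tarfile.addfile() copies raw bytes from the file object under Python 3. A self-contained illustration (the archive name and contents are invented; the real add_tarfile() also forces root ownership on every entry):

# Illustrative only -- not the samba_dist code.
import io
import os
import tarfile
import tempfile

data = b'hello tdb\n'
archive = os.path.join(tempfile.gettempdir(), 'example.tar.gz')

with tarfile.open(archive, 'w:gz') as tar:
    tinfo = tarfile.TarInfo(name='tdb-1.3.18/README.example')
    tinfo.size = len(data)
    tinfo.uname = 'root'
    tinfo.gname = 'root'
    # fileobj must yield bytes, hence binary mode in the real code
    tar.addfile(tinfo, fileobj=io.BytesIO(data))

print('wrote', archive)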
diff --git a/buildtools/wafsamba/samba_git.py b/buildtools/wafsamba/samba_git.py
index c58a579..09a204f 100644
--- a/buildtools/wafsamba/samba_git.py
+++ b/buildtools/wafsamba/samba_git.py
@@ -4,7 +4,7 @@ import subprocess
 def find_git(env=None):
     """Find the git binary."""
     if env is not None and 'GIT' in env:
-        return env['GIT']
+        return env.get_flat('GIT')
 
     # Get version from GIT
     if os.path.exists("/usr/bin/git"):
diff --git a/buildtools/wafsamba/samba_headers.py b/buildtools/wafsamba/samba_headers.py
index 0a80082..a268c01 100644
--- a/buildtools/wafsamba/samba_headers.py
+++ b/buildtools/wafsamba/samba_headers.py
@@ -1,7 +1,7 @@
1# specialist handling of header files for Samba1# specialist handling of header files for Samba
22
3import os, re, sys, fnmatch3import os, re, sys, fnmatch
4import Build, Logs, Utils4from waflib import Build, Logs, Utils, Errors
5from samba_utils import TO_LIST, os_path_relpath5from samba_utils import TO_LIST, os_path_relpath
66
77
@@ -99,7 +99,7 @@ def create_public_header(task):
99 os.unlink(tgt)99 os.unlink(tgt)
100 sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % (100 sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % (
101 os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested))101 os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested))
102 raise Utils.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % (102 raise Errors.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % (
103 hpath, relsrc, task.env.RELPATH))103 hpath, relsrc, task.env.RELPATH))
104 infile.close()104 infile.close()
105 outfile.close()105 outfile.close()
@@ -148,11 +148,12 @@ def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install
148 else:148 else:
149 h_name = h149 h_name = h
150 inst_name = os.path.basename(h)150 inst_name = os.path.basename(h)
151 relpath1 = os_path_relpath(bld.srcnode.abspath(), bld.curdir)151 curdir = bld.path.abspath()
152 relpath2 = os_path_relpath(bld.curdir, bld.srcnode.abspath())152 relpath1 = os_path_relpath(bld.srcnode.abspath(), curdir)
153 relpath2 = os_path_relpath(curdir, bld.srcnode.abspath())
153 targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path))154 targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path))
154 if not os.path.exists(os.path.join(bld.curdir, targetdir)):155 if not os.path.exists(os.path.join(curdir, targetdir)):
155 raise Utils.WafError("missing source directory %s for public header %s" % (targetdir, inst_name))156 raise Errors.WafError("missing source directory %s for public header %s" % (targetdir, inst_name))
156 target = os.path.join(targetdir, inst_name)157 target = os.path.join(targetdir, inst_name)
157158
158 # the source path of the header, relative to the top of the source tree159 # the source path of the header, relative to the top of the source tree
diff --git a/buildtools/wafsamba/samba_install.py b/buildtools/wafsamba/samba_install.py
index 21035bf..47bc0cb 100644
--- a/buildtools/wafsamba/samba_install.py
+++ b/buildtools/wafsamba/samba_install.py
@@ -4,8 +4,8 @@
4# library use4# library use
55
6import os6import os
7import Utils7from waflib import Utils, Errors
8from TaskGen import feature, before, after8from waflib.TaskGen import feature, before, after
9from samba_utils import LIB_PATH, MODE_755, install_rpath, build_rpath9from samba_utils import LIB_PATH, MODE_755, install_rpath, build_rpath
1010
11@feature('install_bin')11@feature('install_bin')
@@ -45,7 +45,7 @@ def install_binary(self):
4545
46 # tell waf to install the right binary46 # tell waf to install the right binary
47 bld.install_as(os.path.join(install_path, orig_target),47 bld.install_as(os.path.join(install_path, orig_target),
48 os.path.join(self.path.abspath(bld.env), self.target),48 self.path.find_or_declare(self.target),
49 chmod=MODE_755)49 chmod=MODE_755)
5050
5151
@@ -143,8 +143,9 @@ def install_library(self):
143143
144 # tell waf to install the library144 # tell waf to install the library
145 bld.install_as(os.path.join(install_path, install_name),145 bld.install_as(os.path.join(install_path, install_name),
146 os.path.join(self.path.abspath(bld.env), inst_name),146 self.path.find_or_declare(inst_name),
147 chmod=MODE_755)147 chmod=MODE_755)
148
148 if install_link and install_link != install_name:149 if install_link and install_link != install_name:
149 # and the symlink if needed150 # and the symlink if needed
150 bld.symlink_as(os.path.join(install_path, install_link), os.path.basename(install_name))151 bld.symlink_as(os.path.join(install_path, install_link), os.path.basename(install_name))
@@ -227,7 +228,7 @@ def symlink_bin(self):
227 return228 return
228229
229 if not self.link_task.outputs or not self.link_task.outputs[0]:230 if not self.link_task.outputs or not self.link_task.outputs[0]:
230 raise Utils.WafError('no outputs found for %s in symlink_bin' % self.name)231 raise Errors.WafError('no outputs found for %s in symlink_bin' % self.name)
231 binpath = self.link_task.outputs[0].abspath(self.env)232 binpath = self.link_task.outputs[0].abspath(self.env)
232 bldpath = os.path.join(self.bld.env.BUILD_DIRECTORY, self.link_task.outputs[0].name)233 bldpath = os.path.join(self.bld.env.BUILD_DIRECTORY, self.link_task.outputs[0].name)
233234
diff --git a/buildtools/wafsamba/samba_optimisation.py b/buildtools/wafsamba/samba_optimisation.py
234deleted file mode 100644235deleted file mode 100644
index 5008f83..0000000
--- a/buildtools/wafsamba/samba_optimisation.py
+++ /dev/null
@@ -1,269 +0,0 @@
1# This file contains waf optimisations for Samba
2
3# most of these optimisations are possible because of the restricted build environment
4# that Samba has. For example, Samba doesn't attempt to cope with Win32 paths during the
5# build, and Samba doesn't need build varients
6
7# overall this makes some build tasks quite a bit faster
8
9import os
10import Build, Utils, Node
11from TaskGen import feature, after, before
12import preproc
13
14@feature('c', 'cc', 'cxx')
15@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
16def apply_incpaths(self):
17 lst = []
18
19 try:
20 kak = self.bld.kak
21 except AttributeError:
22 kak = self.bld.kak = {}
23
24 # TODO move the uselib processing out of here
25 for lib in self.to_list(self.uselib):
26 for path in self.env['CPPPATH_' + lib]:
27 if not path in lst:
28 lst.append(path)
29 if preproc.go_absolute:
30 for path in preproc.standard_includes:
31 if not path in lst:
32 lst.append(path)
33
34 for path in self.to_list(self.includes):
35 if not path in lst:
36 if preproc.go_absolute or path[0] != '/': # os.path.isabs(path):
37 lst.append(path)
38 else:
39 self.env.prepend_value('CPPPATH', path)
40
41 for path in lst:
42 node = None
43 if path[0] == '/': # os.path.isabs(path):
44 if preproc.go_absolute:
45 node = self.bld.root.find_dir(path)
46 elif path[0] == '#':
47 node = self.bld.srcnode
48 if len(path) > 1:
49 try:
50 node = kak[path]
51 except KeyError:
52 kak[path] = node = node.find_dir(path[1:])
53 else:
54 try:
55 node = kak[(self.path.id, path)]
56 except KeyError:
57 kak[(self.path.id, path)] = node = self.path.find_dir(path)
58
59 if node:
60 self.env.append_value('INC_PATHS', node)
61
62@feature('c', 'cc')
63@after('apply_incpaths')
64def apply_obj_vars_cc(self):
65 """after apply_incpaths for INC_PATHS"""
66 env = self.env
67 app = env.append_unique
68 cpppath_st = env['CPPPATH_ST']
69
70 lss = env['_CCINCFLAGS']
71
72 try:
73 cac = self.bld.cac
74 except AttributeError:
75 cac = self.bld.cac = {}
76
77 # local flags come first
78 # set the user-defined includes paths
79 for i in env['INC_PATHS']:
80
81 try:
82 lss.extend(cac[i.id])
83 except KeyError:
84
85 cac[i.id] = [cpppath_st % i.bldpath(env), cpppath_st % i.srcpath(env)]
86 lss.extend(cac[i.id])
87
88 env['_CCINCFLAGS'] = lss
89 # set the library include paths
90 for i in env['CPPPATH']:
91 app('_CCINCFLAGS', cpppath_st % i)
92
93import Node, Environment
94
95def vari(self):
96 return "default"
97Environment.Environment.variant = vari
98
99def variant(self, env):
100 if not env: return 0
101 elif self.id & 3 == Node.FILE: return 0
102 else: return "default"
103Node.Node.variant = variant
104
105
106import TaskGen, Task
107
108def create_task(self, name, src=None, tgt=None):
109 task = Task.TaskBase.classes[name](self.env, generator=self)
110 if src:
111 task.set_inputs(src)
112 if tgt:
113 task.set_outputs(tgt)
114 return task
115TaskGen.task_gen.create_task = create_task
116
117def hash_constraints(self):
118 a = self.attr
119 sum = hash((str(a('before', '')),
120 str(a('after', '')),
121 str(a('ext_in', '')),
122 str(a('ext_out', '')),
123 self.__class__.maxjobs))
124 return sum
125Task.TaskBase.hash_constraints = hash_constraints
126
127def hash_env_vars(self, env, vars_lst):
128 idx = str(id(env)) + str(vars_lst)
129 try:
130 return self.cache_sig_vars[idx]
131 except KeyError:
132 pass
133
134 m = Utils.md5()
135 m.update(''.join([str(env[a]) for a in vars_lst]))
136
137 ret = self.cache_sig_vars[idx] = m.digest()
138 return ret
139Build.BuildContext.hash_env_vars = hash_env_vars
140
141
142def store_fast(self, filename):
143 file = open(filename, 'wb')
144 data = self.get_merged_dict()
145 try:
146 Build.cPickle.dump(data, file, -1)
147 finally:
148 file.close()
149Environment.Environment.store_fast = store_fast
150
151def load_fast(self, filename):
152 file = open(filename, 'rb')
153 try:
154 data = Build.cPickle.load(file)
155 finally:
156 file.close()
157 self.table.update(data)
158Environment.Environment.load_fast = load_fast
159
160def is_this_a_static_lib(self, name):
161 try:
162 cache = self.cache_is_this_a_static_lib
163 except AttributeError:
164 cache = self.cache_is_this_a_static_lib = {}
165 try:
166 return cache[name]
167 except KeyError:
168 ret = cache[name] = 'cstaticlib' in self.bld.get_tgen_by_name(name).features
169 return ret
170TaskGen.task_gen.is_this_a_static_lib = is_this_a_static_lib
171
172def shared_ancestors(self):
173 try:
174 cache = self.cache_is_this_a_static_lib
175 except AttributeError:
176 cache = self.cache_is_this_a_static_lib = {}
177 try:
178 return cache[id(self)]
179 except KeyError:
180
181 ret = []
182 if 'cshlib' in self.features: # or 'cprogram' in self.features:
183 if getattr(self, 'uselib_local', None):
184 lst = self.to_list(self.uselib_local)
185 ret = [x for x in lst if not self.is_this_a_static_lib(x)]
186 cache[id(self)] = ret
187 return ret
188TaskGen.task_gen.shared_ancestors = shared_ancestors
189
190@feature('c', 'cc', 'cxx')
191@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
192def apply_lib_vars(self):
193 """after apply_link because of 'link_task'
194 after default_cc because of the attribute 'uselib'"""
195
196    # after 'apply_core' in case of 'cc' when there is no link task
197
198 env = self.env
199 app = env.append_value
200 seen_libpaths = set([])
201
202 # OPTIMIZATION 1: skip uselib variables already added (700ms)
203 seen_uselib = set([])
204
205 # 1. the case of the libs defined in the project (visit ancestors first)
206 # the ancestors external libraries (uselib) will be prepended
207 self.uselib = self.to_list(self.uselib)
208 names = self.to_list(self.uselib_local)
209
210 seen = set([])
211 tmp = Utils.deque(names) # consume a copy of the list of names
212 while tmp:
213 lib_name = tmp.popleft()
214 # visit dependencies only once
215 if lib_name in seen:
216 continue
217
218 y = self.get_tgen_by_name(lib_name)
219 if not y:
220 raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
221 y.post()
222 seen.add(lib_name)
223
224 # OPTIMIZATION 2: pre-compute ancestors shared libraries (100ms)
225 tmp.extend(y.shared_ancestors())
226
227 # link task and flags
228 if getattr(y, 'link_task', None):
229
230 link_name = y.target[y.target.rfind('/') + 1:]
231 if 'cstaticlib' in y.features:
232 app('STATICLIB', link_name)
233 elif 'cshlib' in y.features or 'cprogram' in y.features:
234 # WARNING some linkers can link against programs
235 app('LIB', link_name)
236
237 # the order
238 self.link_task.set_run_after(y.link_task)
239
240 # for the recompilation
241 dep_nodes = getattr(self.link_task, 'dep_nodes', [])
242 self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
243
244 # OPTIMIZATION 3: reduce the amount of function calls
245 # add the link path too
246 par = y.link_task.outputs[0].parent
247 if id(par) not in seen_libpaths:
248 seen_libpaths.add(id(par))
249 tmp_path = par.bldpath(self.env)
250 if not tmp_path in env['LIBPATH']:
251 env.prepend_value('LIBPATH', tmp_path)
252
253
254 # add ancestors uselib too - but only propagate those that have no staticlib
255 for v in self.to_list(y.uselib):
256 if v not in seen_uselib:
257 seen_uselib.add(v)
258 if not env['STATICLIB_' + v]:
259 if not v in self.uselib:
260 self.uselib.insert(0, v)
261
262 # 2. the case of the libs defined outside
263 for x in self.uselib:
264 for v in self.p_flag_vars:
265 val = self.env[v + '_' + x]
266 if val:
267 self.env.append_value(v, val)
268
269
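The apply_lib_vars method above resolves uselib_local dependencies breadth-first: each local library is visited once, its shared ancestors are queued, and the ancestors' external uselib names are prepended so their flags end up first. A minimal standalone sketch of that traversal pattern, with made-up target names and plain dicts instead of waf task generators:

from collections import deque

def collect_uselib(start, deps, uselib_of):
    """deps: name -> shared-library ancestors; uselib_of: name -> external uselib names."""
    uselib = []
    seen = set()
    queue = deque(deps.get(start, []))
    while queue:
        name = queue.popleft()
        if name in seen:              # visit each dependency only once
            continue
        seen.add(name)
        queue.extend(deps.get(name, []))
        for v in uselib_of.get(name, []):
            if v not in uselib:       # ancestors' external libs are prepended
                uselib.insert(0, v)
    return uselib

# collect_uselib('prog', {'prog': ['libA'], 'libA': ['libB']},
#                {'libA': ['ZLIB'], 'libB': ['SSL']})  ->  ['SSL', 'ZLIB']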
diff --git a/buildtools/wafsamba/samba_patterns.py b/buildtools/wafsamba/samba_patterns.py
index 2b93937..d0fe965 100644
--- a/buildtools/wafsamba/samba_patterns.py
+++ b/buildtools/wafsamba/samba_patterns.py
@@ -1,6 +1,7 @@
 # a waf tool to add extension based build patterns for Samba
 
-import Build
+import sys
+from waflib import Build
 from wafsamba import samba_version_file
 
 def write_version_header(task):
@@ -146,13 +147,19 @@ def write_build_options_section(fp, keys, section):
     fp.write("\n")
 
 def write_build_options(task):
-    tbl = task.env['defines']
+    tbl = task.env
     keys_option_with = []
     keys_option_utmp = []
     keys_option_have = []
     keys_header_sys = []
     keys_header_other = []
     keys_misc = []
+    if sys.hexversion>0x300000f:
+        trans_table = bytes.maketrans(b'.-()', b'____')
+    else:
+        import string
+        trans_table = string.maketrans('.-()', '____')
+
     for key in tbl:
         if key.startswith("HAVE_UT_UT_") or key.find("UTMP") >= 0:
             keys_option_utmp.append(key)
@@ -169,7 +176,7 @@ def write_build_options(task):
             l = key.split("(")
             keys_misc.append(l[0])
         else:
-            keys_misc.append(key)
+            keys_misc.append(key.translate(trans_table))
 
     tgt = task.outputs[0].bldpath(task.env)
     f = open(tgt, 'w')
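The trans_table added above maps the characters '.-()' to underscores so that configure keys come out as plain identifier-style names in the generated build-options output. A small illustration of the Python 3 branch (the keys here are invented for the example):

trans_table = bytes.maketrans(b'.-()', b'____')
for key in ('HAVE_STRUCT_STAT.ST_RDEV', 'HAVE_KRB5_VERSION(1.15)'):
    print(key.translate(trans_table))
# HAVE_STRUCT_STAT_ST_RDEV
# HAVE_KRB5_VERSION_1_15_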
diff --git a/buildtools/wafsamba/samba_perl.py b/buildtools/wafsamba/samba_perl.py
index 2e9a53a..e019acb 100644
--- a/buildtools/wafsamba/samba_perl.py
+++ b/buildtools/wafsamba/samba_perl.py
@@ -1,6 +1,6 @@
-import Utils
-from Configure import conf
-
+from waflib import Utils
+from waflib.Configure import conf
+from samba_utils import get_string
 done = {}
 
 @conf
@@ -9,13 +9,16 @@ def SAMBA_CHECK_PERL(conf, mandatory=True, version=(5,0,0)):
         return
     done["done"] = True
     conf.find_program('perl', var='PERL', mandatory=mandatory)
-    conf.check_tool('perl')
+    conf.load('perl')
     path_perl = conf.find_program('perl')
     conf.env.PERL_SPECIFIED = (conf.env.PERL != path_perl)
     conf.check_perl_version(version)
 
     def read_perl_config_var(cmd):
-        return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
+        output = Utils.cmd_output([conf.env.get_flat('PERL'), '-MConfig', '-e', cmd])
+        if not isinstance(output, str):
+            output = get_string(output)
+        return Utils.to_list(output)
 
     def check_perl_config_var(var):
         conf.start_msg("Checking for perl $Config{%s}:" % var)
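read_perl_config_var() now routes the command output through get_string() because under Python 3 the command output arrives as bytes. Roughly the same logic expressed with the standard library only, as a hedged sketch (the perl binary is simply taken from PATH here):

import subprocess

def get_string(bytesorstring):
    if isinstance(bytesorstring, bytes):
        return bytesorstring.decode('utf8')
    return bytesorstring

def read_perl_config_var(cmd, perl='perl'):
    # perl prints the requested %Config value; decode before splitting
    out = subprocess.check_output([perl, '-MConfig', '-e', cmd])
    return get_string(out).split()

# read_perl_config_var('print $Config{vendorarch};')
# -> a one-element list holding the vendorarch path of the local perl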
diff --git a/buildtools/wafsamba/samba_pidl.py b/buildtools/wafsamba/samba_pidl.py
index 9651e4d..3fecfa9 100644
--- a/buildtools/wafsamba/samba_pidl.py
+++ b/buildtools/wafsamba/samba_pidl.py
@@ -1,8 +1,8 @@
1# waf build tool for building IDL files with pidl1# waf build tool for building IDL files with pidl
22
3import os3import os
4import Build4from waflib import Build, Utils
5from TaskGen import feature, before5from waflib.TaskGen import feature, before
6from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE6from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE
77
8def SAMBA_PIDL(bld, pname, source,8def SAMBA_PIDL(bld, pname, source,
@@ -76,9 +76,9 @@ def SAMBA_PIDL(bld, pname, source,
76 else:76 else:
77 cc = 'CC="%s"' % bld.CONFIG_GET("CC")77 cc = 'CC="%s"' % bld.CONFIG_GET("CC")
7878
79 t = bld(rule='cd .. && %s %s ${PERL} "${PIDL}" --quiet ${OPTIONS} --outputdir ${OUTPUTDIR} -- "${SRC[0].abspath(env)}"' % (cpp, cc),79 t = bld(rule='cd ${PIDL_LAUNCH_DIR} && %s %s ${PERL} ${PIDL} --quiet ${OPTIONS} --outputdir ${OUTPUTDIR} -- "${IDLSRC}"' % (cpp, cc),
80 ext_out = '.c',80 ext_out = '.c',
81 before = 'cc',81 before = 'c',
82 update_outputs = True,82 update_outputs = True,
83 shell = True,83 shell = True,
84 source = source,84 source = source,
@@ -86,18 +86,22 @@ def SAMBA_PIDL(bld, pname, source,
86 name = name,86 name = name,
87 samba_type = 'PIDL')87 samba_type = 'PIDL')
8888
89 # prime the list of nodes we are dependent on with the cached pidl sources
90 t.allnodes = pidl_src_nodes
9189
92 t.env.PIDL = os.path.join(bld.srcnode.abspath(), 'pidl/pidl')90 t.env.PIDL_LAUNCH_DIR = bld.srcnode.path_from(bld.bldnode)
91 pnode = bld.srcnode.find_resource('pidl/pidl')
92 t.env.PIDL = pnode.path_from(bld.srcnode)
93 t.env.OPTIONS = TO_LIST(options)93 t.env.OPTIONS = TO_LIST(options)
94 t.env.OUTPUTDIR = bld.bldnode.name + '/' + bld.path.find_dir(output_dir).bldpath(t.env)94 snode = t.path.find_resource(source[0])
95 t.env.IDLSRC = snode.path_from(bld.srcnode)
96 t.env.OUTPUTDIR = bld.bldnode.path_from(bld.srcnode) + '/' + bld.path.find_dir(output_dir).path_from(bld.srcnode)
97
98 bld.add_manual_dependency(snode, pidl_src_nodes)
9599
96 if generate_tables and table_header_idx is not None:100 if generate_tables and table_header_idx is not None:
97 pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS')101 pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS')
98 pidl_headers[name] = [bld.path.find_or_declare(out_files[table_header_idx])]102 pidl_headers[name] = [bld.path.find_or_declare(out_files[table_header_idx])]
99103
100 t.more_includes = '#' + bld.path.relpath_gen(bld.srcnode)104 t.more_includes = '#' + bld.path.path_from(bld.srcnode)
101Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL105Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL
102106
103107
@@ -117,13 +121,15 @@ Build.BuildContext.SAMBA_PIDL_LIST = SAMBA_PIDL_LIST
117@before('exec_rule')121@before('exec_rule')
118def collect(self):122def collect(self):
119 pidl_headers = LOCAL_CACHE(self.bld, 'PIDL_HEADERS')123 pidl_headers = LOCAL_CACHE(self.bld, 'PIDL_HEADERS')
124 # The first source is tables.pl itself
125 self.source = Utils.to_list(self.source)
120 for (name, hd) in pidl_headers.items():126 for (name, hd) in pidl_headers.items():
121 y = self.bld.get_tgen_by_name(name)127 y = self.bld.get_tgen_by_name(name)
122 self.bld.ASSERT(y is not None, 'Failed to find PIDL header %s' % name)128 self.bld.ASSERT(y is not None, 'Failed to find PIDL header %s' % name)
123 y.post()129 y.post()
124 for node in hd:130 for node in hd:
125 self.bld.ASSERT(node is not None, 'Got None as build node generating PIDL table for %s' % name)131 self.bld.ASSERT(node is not None, 'Got None as build node generating PIDL table for %s' % name)
126 self.source += " " + node.relpath_gen(self.path)132 self.source.append(node)
127133
128134
129def SAMBA_PIDL_TABLES(bld, name, target):135def SAMBA_PIDL_TABLES(bld, name, target):
@@ -131,9 +137,9 @@ def SAMBA_PIDL_TABLES(bld, name, target):
131 bld.SET_BUILD_GROUP('main')137 bld.SET_BUILD_GROUP('main')
132 t = bld(138 t = bld(
133 features = 'collect',139 features = 'collect',
134 rule = '${PERL} ${SRC} --output ${TGT} | sed "s|default/||" > ${TGT}',140 rule = '${PERL} ${SRC} > ${TGT}',
135 ext_out = '.c',141 ext_out = '.c',
136 before = 'cc',142 before = 'c',
137 update_outputs = True,143 update_outputs = True,
138 shell = True,144 shell = True,
139 source = '../../librpc/tables.pl',145 source = '../../librpc/tables.pl',
diff --git a/buildtools/wafsamba/samba_python.py b/buildtools/wafsamba/samba_python.py
index cb99fe9..fac0e34 100644
--- a/buildtools/wafsamba/samba_python.py
+++ b/buildtools/wafsamba/samba_python.py
@@ -1,11 +1,11 @@
1# waf build tool for building IDL files with pidl1# waf build tool for building IDL files with pidl
22
3import os3import os, sys
4import Build, Logs, Utils, Configure4from waflib import Build, Logs, Utils, Configure, Errors
5from Configure import conf5from waflib.Configure import conf
66
7@conf7@conf
8def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):8def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,6,0)):
9 # enable tool to build python extensions9 # enable tool to build python extensions
10 if conf.env.HAVE_PYTHON_H:10 if conf.env.HAVE_PYTHON_H:
11 conf.check_python_version(version)11 conf.check_python_version(version)
@@ -14,23 +14,25 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):
14 interpreters = []14 interpreters = []
1515
16 if conf.env['EXTRA_PYTHON']:16 if conf.env['EXTRA_PYTHON']:
17 conf.all_envs['extrapython'] = conf.env.copy()17 conf.all_envs['extrapython'] = conf.env.derive()
18 conf.setenv('extrapython')18 conf.setenv('extrapython')
19 conf.env['PYTHON'] = conf.env['EXTRA_PYTHON']19 conf.env['PYTHON'] = conf.env['EXTRA_PYTHON']
20 conf.env['IS_EXTRA_PYTHON'] = 'yes'20 conf.env['IS_EXTRA_PYTHON'] = 'yes'
21 conf.find_program('python', var='PYTHON', mandatory=True)21 conf.find_program('python', var='PYTHON', mandatory=True)
22 conf.check_tool('python')22 conf.load('python')
23 try:23 try:
24 conf.check_python_version((3, 3, 0))24 conf.check_python_version(version)
25 except Exception:25 except Exception:
26 Logs.warn('extra-python needs to be Python 3.3 or later')26 Logs.warn('extra-python needs to be Python %s.%s.%s or later' %
27 (version[0], version[1], version[2]))
27 raise28 raise
28 interpreters.append(conf.env['PYTHON'])29 interpreters.append(conf.env['PYTHON'])
29 conf.setenv('default')30 conf.setenv('default')
3031
31 conf.find_program('python', var='PYTHON', mandatory=mandatory)32 conf.find_program('python3', var='PYTHON', mandatory=mandatory)
32 conf.check_tool('python')33 conf.load('python')
33 path_python = conf.find_program('python')34 path_python = conf.find_program('python3')
35
34 conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python)36 conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python)
35 conf.check_python_version(version)37 conf.check_python_version(version)
3638
@@ -42,14 +44,16 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):
42def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):44def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):
43 if conf.env.disable_python:45 if conf.env.disable_python:
44 if mandatory:46 if mandatory:
45 raise Utils.WafError("Cannot check for python headers when "47 raise Errors.WafError("Cannot check for python headers when "
46 "--disable-python specified")48 "--disable-python specified")
4749
48 conf.msg("python headers", "Check disabled due to --disable-python")50 conf.msg("python headers", "Check disabled due to --disable-python")
49 # we don't want PYTHONDIR in config.h, as otherwise changing51 # we don't want PYTHONDIR in config.h, as otherwise changing
50 # --prefix causes a complete rebuild52 # --prefix causes a complete rebuild
51 del(conf.env.defines['PYTHONDIR'])53 conf.env.DEFINES = [x for x in conf.env.DEFINES
52 del(conf.env.defines['PYTHONARCHDIR'])54 if not x.startswith('PYTHONDIR=')
55 and not x.startswith('PYTHONARCHDIR=')]
56
53 return57 return
5458
55 if conf.env["python_headers_checked"] == []:59 if conf.env["python_headers_checked"] == []:
@@ -64,21 +68,22 @@ def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):
64 if conf.env['EXTRA_PYTHON']:68 if conf.env['EXTRA_PYTHON']:
65 extraversion = conf.all_envs['extrapython']['PYTHON_VERSION']69 extraversion = conf.all_envs['extrapython']['PYTHON_VERSION']
66 if extraversion == conf.env['PYTHON_VERSION']:70 if extraversion == conf.env['PYTHON_VERSION']:
67 raise Utils.WafError("extrapython %s is same as main python %s" % (71 raise Errors.WafError("extrapython %s is same as main python %s" % (
68 extraversion, conf.env['PYTHON_VERSION']))72 extraversion, conf.env['PYTHON_VERSION']))
69 else:73 else:
70 conf.msg("python headers", "using cache")74 conf.msg("python headers", "using cache")
7175
72 # we don't want PYTHONDIR in config.h, as otherwise changing76 # we don't want PYTHONDIR in config.h, as otherwise changing
73 # --prefix causes a complete rebuild77 # --prefix causes a complete rebuild
74 del(conf.env.defines['PYTHONDIR'])78 conf.env.DEFINES = [x for x in conf.env.DEFINES
75 del(conf.env.defines['PYTHONARCHDIR'])79 if not x.startswith('PYTHONDIR=')
80 and not x.startswith('PYTHONARCHDIR=')]
7681
77def _check_python_headers(conf, mandatory):82def _check_python_headers(conf, mandatory):
78 try:83 try:
79 Configure.ConfigurationError84 conf.errors.ConfigurationError
80 conf.check_python_headers(mandatory=mandatory)85 conf.check_python_headers()
81 except Configure.ConfigurationError:86 except conf.errors.ConfigurationError:
82 if mandatory:87 if mandatory:
83 raise88 raise
8489
@@ -95,6 +100,11 @@ def _check_python_headers(conf, mandatory):
95 conf.env.append_unique('LIBPATH_PYEMBED', lib[2:]) # strip '-L'100 conf.env.append_unique('LIBPATH_PYEMBED', lib[2:]) # strip '-L'
96 conf.env['LINKFLAGS_PYEMBED'].remove(lib)101 conf.env['LINKFLAGS_PYEMBED'].remove(lib)
97102
103 # same as in waf 1.5, keep only '-fno-strict-aliasing'
104 # and ignore defines such as NDEBUG _FORTIFY_SOURCE=2
105 conf.env.DEFINES_PYEXT = []
106 conf.env.CFLAGS_PYEXT = ['-fno-strict-aliasing']
107
98 return108 return
99109
100def PYTHON_BUILD_IS_ENABLED(self):110def PYTHON_BUILD_IS_ENABLED(self):
@@ -145,7 +155,7 @@ def SAMBA_PYTHON(bld, name,
145 source = bld.EXPAND_VARIABLES(source, vars=vars)155 source = bld.EXPAND_VARIABLES(source, vars=vars)
146156
147 if realname is not None:157 if realname is not None:
148 link_name = 'python_modules/%s' % realname158 link_name = 'python/%s' % realname
149 else:159 else:
150 link_name = None160 link_name = None
151161
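With newer waf the configuration defines live in conf.env.DEFINES as a flat list of 'NAME=value' strings, so PYTHONDIR and PYTHONARCHDIR are now dropped by filtering on their prefix rather than by deleting dict keys. The effect, shown on invented values:

DEFINES = ['HAVE_LIBREPLACE=1',
           'PYTHONDIR="/usr/lib/python3"',
           'PYTHONARCHDIR="/usr/lib/python3"']
DEFINES = [x for x in DEFINES
           if not x.startswith('PYTHONDIR=')
           and not x.startswith('PYTHONARCHDIR=')]
print(DEFINES)   # ['HAVE_LIBREPLACE=1']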
diff --git a/buildtools/wafsamba/samba_third_party.py b/buildtools/wafsamba/samba_third_party.py
index 1144f81..e0dd3e1 100644
--- a/buildtools/wafsamba/samba_third_party.py
+++ b/buildtools/wafsamba/samba_third_party.py
@@ -1,12 +1,12 @@
 # functions to support third party libraries
 
 import os
-import Utils, Build
-from Configure import conf
+from waflib import Utils, Build, Context
+from waflib.Configure import conf
 
 @conf
 def CHECK_FOR_THIRD_PARTY(conf):
-    return os.path.exists(os.path.join(Utils.g_module.srcdir, 'third_party'))
+    return os.path.exists(os.path.join(Context.g_module.top, 'third_party'))
 
 Build.BuildContext.CHECK_FOR_THIRD_PARTY = CHECK_FOR_THIRD_PARTY
 
@@ -36,18 +36,18 @@ Build.BuildContext.CHECK_POPT = CHECK_POPT
 
 @conf
 def CHECK_CMOCKA(conf):
-    return conf.CHECK_BUNDLED_SYSTEM_PKG('cmocka', minversion='1.1.1')
+    return conf.CHECK_BUNDLED_SYSTEM_PKG('cmocka', minversion='1.1.3')
 
 Build.BuildContext.CHECK_CMOCKA = CHECK_CMOCKA
 
 @conf
 def CHECK_SOCKET_WRAPPER(conf):
-    return conf.CHECK_BUNDLED_SYSTEM_PKG('socket_wrapper', minversion='1.1.9')
+    return conf.CHECK_BUNDLED_SYSTEM_PKG('socket_wrapper', minversion='1.2.1')
 Build.BuildContext.CHECK_SOCKET_WRAPPER = CHECK_SOCKET_WRAPPER
 
 @conf
 def CHECK_NSS_WRAPPER(conf):
-    return conf.CHECK_BUNDLED_SYSTEM_PKG('nss_wrapper', minversion='1.1.3')
+    return conf.CHECK_BUNDLED_SYSTEM_PKG('nss_wrapper', minversion='1.1.5')
 Build.BuildContext.CHECK_NSS_WRAPPER = CHECK_NSS_WRAPPER
 
 @conf
@@ -62,5 +62,5 @@ Build.BuildContext.CHECK_UID_WRAPPER = CHECK_UID_WRAPPER
 
 @conf
 def CHECK_PAM_WRAPPER(conf):
-    return conf.CHECK_BUNDLED_SYSTEM_PKG('pam_wrapper', minversion='1.0.4')
+    return conf.CHECK_BUNDLED_SYSTEM_PKG('pam_wrapper', minversion='1.0.7')
 Build.BuildContext.CHECK_PAM_WRAPPER = CHECK_PAM_WRAPPER
diff --git a/buildtools/wafsamba/samba_utils.py b/buildtools/wafsamba/samba_utils.py
index 0f95c12..ad97de1 100644
--- a/buildtools/wafsamba/samba_utils.py
+++ b/buildtools/wafsamba/samba_utils.py
@@ -1,30 +1,92 @@
1# a waf tool to add autoconf-like macros to the configure section1# a waf tool to add autoconf-like macros to the configure section
2# and for SAMBA_ macros for building libraries, binaries etc2# and for SAMBA_ macros for building libraries, binaries etc
33
4import os, sys, re, fnmatch, shlex4import errno
5import os, sys, re, fnmatch, shlex, inspect
5from optparse import SUPPRESS_HELP6from optparse import SUPPRESS_HELP
6import Build, Options, Utils, Task, Logs, Configure7from waflib import Build, Options, Utils, Task, Logs, Configure, Errors, Context
7from TaskGen import feature, before, after8from waflib import Scripting
8from Configure import conf, ConfigurationContext9from waflib.TaskGen import feature, before, after
9from Logs import debug10from waflib.Configure import ConfigurationContext
11from waflib.Logs import debug
12from waflib import ConfigSet
13from waflib.Build import CACHE_SUFFIX
1014
11# TODO: make this a --option15# TODO: make this a --option
12LIB_PATH="shared"16LIB_PATH="shared"
1317
1418
19PY3 = sys.version_info[0] == 3
20
21if PY3:
22
23    # helper function to get a string from a variable that may be 'str' or
24    # 'bytes'; if it is 'bytes' it is decoded using 'utf8', and a 'str' is
25    # returned unchanged
26    # Using this helper in code shared between PY2 and PY3 ensures that the
27    # PY2 code runs unchanged under PY2, while under PY3 the variable is
28    # decoded when necessary (see the PY2 implementation of this function below)
29 def get_string(bytesorstring):
30 tmp = bytesorstring
31 if isinstance(bytesorstring, bytes):
32 tmp = bytesorstring.decode('utf8')
33 elif not isinstance(bytesorstring, str):
34 raise ValueError('Expected byte of string for %s:%s' % (type(bytesorstring), bytesorstring))
35 return tmp
36
37else:
38
39 # Helper function to return string.
40 # if 'str' or 'unicode' passed in they are returned unchanged
41 # otherwise an exception is generated
42    # Using this helper in code shared between PY2 and PY3 ensures that the
43    # PY2 code runs unchanged under PY2, while under PY3 the variable is
44    # decoded when necessary (see the PY3 implementation of this function above)
45 def get_string(bytesorstring):
46 tmp = bytesorstring
47 if not(isinstance(bytesorstring, str) or isinstance(bytesorstring, unicode)):
48 raise ValueError('Expected str or unicode for %s:%s' % (type(bytesorstring), bytesorstring))
49 return tmp
50
15# sigh, python octal constants are a mess51# sigh, python octal constants are a mess
16MODE_644 = int('644', 8)52MODE_644 = int('644', 8)
53MODE_744 = int('744', 8)
17MODE_755 = int('755', 8)54MODE_755 = int('755', 8)
55MODE_777 = int('777', 8)
56
57def conf(f):
58 # override in order to propagate the argument "mandatory"
59 def fun(*k, **kw):
60 mandatory = True
61 if 'mandatory' in kw:
62 mandatory = kw['mandatory']
63 del kw['mandatory']
64
65 try:
66 return f(*k, **kw)
67 except Errors.ConfigurationError:
68 if mandatory:
69 raise
70
71 fun.__name__ = f.__name__
72 if 'mandatory' in inspect.getsource(f):
73 fun = f
74
75 setattr(Configure.ConfigurationContext, f.__name__, fun)
76 setattr(Build.BuildContext, f.__name__, fun)
77 return f
78Configure.conf = conf
79Configure.conftest = conf
1880
19@conf81@conf
20def SET_TARGET_TYPE(ctx, target, value):82def SET_TARGET_TYPE(ctx, target, value):
21 '''set the target type of a target'''83 '''set the target type of a target'''
22 cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')84 cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
23 if target in cache and cache[target] != 'EMPTY':85 if target in cache and cache[target] != 'EMPTY':
24 Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, cache[target]))86 Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.path.abspath(), value, cache[target]))
25 sys.exit(1)87 sys.exit(1)
26 LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)88 LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
27 debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))89 debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.path.abspath()))
28 return True90 return True
2991
3092
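The overridden conf() decorator above registers the wrapped function on both ConfigurationContext and BuildContext and swallows configuration failures unless the caller asked for mandatory=True (the default). A stripped-down model of just the mandatory handling, where the exception type and the check function are stand-ins rather than waf API:

class ConfigurationError(Exception):
    pass

def conf(f):
    # wrap a check so a failure is ignored when mandatory=False is passed
    def fun(*k, **kw):
        mandatory = kw.pop('mandatory', True)
        try:
            return f(*k, **kw)
        except ConfigurationError:
            if mandatory:
                raise
    fun.__name__ = f.__name__
    return fun

@conf
def CHECK_SOMETHING(ctx):          # hypothetical check that always fails
    raise ConfigurationError("not found")

print(CHECK_SOMETHING(None, mandatory=False))   # None, no exception raised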
@@ -101,7 +163,7 @@ def LOCAL_CACHE_SET(ctx, cachename, key, value):
101def ASSERT(ctx, expression, msg):163def ASSERT(ctx, expression, msg):
102 '''a build assert call'''164 '''a build assert call'''
103 if not expression:165 if not expression:
104 raise Utils.WafError("ERROR: %s\n" % msg)166 raise Errors.WafError("ERROR: %s\n" % msg)
105Build.BuildContext.ASSERT = ASSERT167Build.BuildContext.ASSERT = ASSERT
106168
107169
@@ -122,9 +184,9 @@ def dict_concat(d1, d2):
122184
123def ADD_COMMAND(opt, name, function):185def ADD_COMMAND(opt, name, function):
124 '''add a new top level command to waf'''186 '''add a new top level command to waf'''
125 Utils.g_module.__dict__[name] = function187 Context.g_module.__dict__[name] = function
126 opt.name = function188 opt.name = function
127Options.Handler.ADD_COMMAND = ADD_COMMAND189Options.OptionsContext.ADD_COMMAND = ADD_COMMAND
128190
129191
130@feature('c', 'cc', 'cshlib', 'cprogram')192@feature('c', 'cc', 'cshlib', 'cprogram')
@@ -199,8 +261,10 @@ def subst_vars_error(string, env):
199 if re.match('\$\{\w+\}', v):261 if re.match('\$\{\w+\}', v):
200 vname = v[2:-1]262 vname = v[2:-1]
201 if not vname in env:263 if not vname in env:
202 raise KeyError("Failed to find variable %s in %s" % (vname, string))264 raise KeyError("Failed to find variable %s in %s in env %s <%s>" % (vname, string, env.__class__, str(env)))
203 v = env[vname]265 v = env[vname]
266 if isinstance(v, list):
267 v = ' '.join(v)
204 out.append(v)268 out.append(v)
205 return ''.join(out)269 return ''.join(out)
206270
@@ -212,51 +276,6 @@ def SUBST_ENV_VAR(ctx, varname):
212Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR276Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR
213277
214278
215def ENFORCE_GROUP_ORDERING(bld):
216 '''enforce group ordering for the project. This
217 makes the group ordering apply only when you specify
218 a target with --target'''
219 if Options.options.compile_targets:
220 @feature('*')
221 @before('exec_rule', 'apply_core', 'collect')
222 def force_previous_groups(self):
223 if getattr(self.bld, 'enforced_group_ordering', False):
224 return
225 self.bld.enforced_group_ordering = True
226
227 def group_name(g):
228 tm = self.bld.task_manager
229 return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]
230
231 my_id = id(self)
232 bld = self.bld
233 stop = None
234 for g in bld.task_manager.groups:
235 for t in g.tasks_gen:
236 if id(t) == my_id:
237 stop = id(g)
238 debug('group: Forcing up to group %s for target %s',
239 group_name(g), self.name or self.target)
240 break
241 if stop is not None:
242 break
243 if stop is None:
244 return
245
246 for i in xrange(len(bld.task_manager.groups)):
247 g = bld.task_manager.groups[i]
248 bld.task_manager.current_group = i
249 if id(g) == stop:
250 break
251 debug('group: Forcing group %s', group_name(g))
252 for t in g.tasks_gen:
253 if not getattr(t, 'forced_groups', False):
254 debug('group: Posting %s', t.name or t.target)
255 t.forced_groups = True
256 t.post()
257Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING
258
259
260def recursive_dirlist(dir, relbase, pattern=None):279def recursive_dirlist(dir, relbase, pattern=None):
261 '''recursive directory list'''280 '''recursive directory list'''
262 ret = []281 ret = []
@@ -271,6 +290,18 @@ def recursive_dirlist(dir, relbase, pattern=None):
271 return ret290 return ret
272291
273292
293def symlink(src, dst, force=True):
294 """Can create symlink by force"""
295 try:
296 os.symlink(src, dst)
297 except OSError as exc:
298 if exc.errno == errno.EEXIST and force:
299 os.remove(dst)
300 os.symlink(src, dst)
301 else:
302 raise
303
304
274def mkdir_p(dir):305def mkdir_p(dir):
275 '''like mkdir -p'''306 '''like mkdir -p'''
276 if not dir:307 if not dir:
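The new symlink() helper added above is self-contained and can be exercised outside waf: with force=True an existing destination is replaced, otherwise the EEXIST error propagates. The file names in the usage comment are illustrative.

import errno, os

def symlink(src, dst, force=True):
    try:
        os.symlink(src, dst)
    except OSError as exc:
        if exc.errno == errno.EEXIST and force:
            os.remove(dst)           # replace the stale link/file
            os.symlink(src, dst)
        else:
            raise

# symlink('target.so', 'current.so')   # safe to run repeatedly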
@@ -312,8 +343,7 @@ def EXPAND_VARIABLES(ctx, varstr, vars=None):
312 if not isinstance(varstr, str):343 if not isinstance(varstr, str):
313 return varstr344 return varstr
314345
315 import Environment346 env = ConfigSet.ConfigSet()
316 env = Environment.Environment()
317 ret = varstr347 ret = varstr
318 # substitute on user supplied dict if avaiilable348 # substitute on user supplied dict if avaiilable
319 if vars is not None:349 if vars is not None:
@@ -345,16 +375,18 @@ def RUN_COMMAND(cmd,
345 return os.WEXITSTATUS(status)375 return os.WEXITSTATUS(status)
346 if os.WIFSIGNALED(status):376 if os.WIFSIGNALED(status):
347 return - os.WTERMSIG(status)377 return - os.WTERMSIG(status)
348 Logs.error("Unknown exit reason %d for command: %s" (status, cmd))378 Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
349 return -1379 return -1
350380
351381
352def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):382def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
353 env = LOAD_ENVIRONMENT()383 env = LOAD_ENVIRONMENT()
354 if pythonpath is None:384 if pythonpath is None:
355 pythonpath = os.path.join(Utils.g_module.blddir, 'python')385 pythonpath = os.path.join(Context.g_module.out, 'python')
356 result = 0386 result = 0
357 for interp in env.python_interpreters:387 for interp in env.python_interpreters:
388 if not isinstance(interp, str):
389 interp = ' '.join(interp)
358 for testfile in testfiles:390 for testfile in testfiles:
359 cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile)391 cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile)
360 if extra_env:392 if extra_env:
@@ -374,16 +406,15 @@ try:
374 # Even if hashlib.md5 exists, it may be unusable.406 # Even if hashlib.md5 exists, it may be unusable.
375 # Try to use MD5 function. In FIPS mode this will cause an exception407 # Try to use MD5 function. In FIPS mode this will cause an exception
376 # and we'll get to the replacement code408 # and we'll get to the replacement code
377 foo = md5('abcd')409 foo = md5(b'abcd')
378except:410except:
379 try:411 try:
380 import md5412 import md5
381 # repeat the same check here, mere success of import is not enough.413 # repeat the same check here, mere success of import is not enough.
382 # Try to use MD5 function. In FIPS mode this will cause an exception414 # Try to use MD5 function. In FIPS mode this will cause an exception
383 foo = md5.md5('abcd')415 foo = md5.md5(b'abcd')
384 except:416 except:
385 import Constants417 Context.SIG_NIL = hash('abcd')
386 Constants.SIG_NIL = hash('abcd')
387 class replace_md5(object):418 class replace_md5(object):
388 def __init__(self):419 def __init__(self):
389 self.val = None420 self.val = None
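The probe above deliberately hashes real data because on a FIPS-enabled system importing hashlib succeeds but calling md5() raises; only then does the code fall back to the replace_md5 class. The shape of the check, standalone:

try:
    from hashlib import md5
    md5(b'abcd')          # constructing with data is what fails under FIPS
    have_md5 = True
except Exception:
    have_md5 = False
print("usable md5:", have_md5)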
@@ -409,20 +440,20 @@ except:
409def LOAD_ENVIRONMENT():440def LOAD_ENVIRONMENT():
410 '''load the configuration environment, allowing access to env vars441 '''load the configuration environment, allowing access to env vars
411 from new commands'''442 from new commands'''
412 import Environment443 env = ConfigSet.ConfigSet()
413 env = Environment.Environment()
414 try:444 try:
415 env.load('.lock-wscript')445 p = os.path.join(Context.g_module.out, 'c4che/default'+CACHE_SUFFIX)
416 env.load(env.blddir + '/c4che/default.cache.py')446 env.load(p)
417 except:447 except (OSError, IOError):
418 pass448 pass
419 return env449 return env
420450
421451
422def IS_NEWER(bld, file1, file2):452def IS_NEWER(bld, file1, file2):
423 '''return True if file1 is newer than file2'''453 '''return True if file1 is newer than file2'''
424 t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime454 curdir = bld.path.abspath()
425 t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime455 t1 = os.stat(os.path.join(curdir, file1)).st_mtime
456 t2 = os.stat(os.path.join(curdir, file2)).st_mtime
426 return t1 > t2457 return t1 > t2
427Build.BuildContext.IS_NEWER = IS_NEWER458Build.BuildContext.IS_NEWER = IS_NEWER
428459
@@ -432,47 +463,46 @@ def RECURSE(ctx, directory):
432 '''recurse into a directory, relative to the curdir or top level'''463 '''recurse into a directory, relative to the curdir or top level'''
433 try:464 try:
434 visited_dirs = ctx.visited_dirs465 visited_dirs = ctx.visited_dirs
435 except:466 except AttributeError:
436 visited_dirs = ctx.visited_dirs = set()467 visited_dirs = ctx.visited_dirs = set()
437 d = os.path.join(ctx.curdir, directory)468 d = os.path.join(ctx.path.abspath(), directory)
438 if os.path.exists(d):469 if os.path.exists(d):
439 abspath = os.path.abspath(d)470 abspath = os.path.abspath(d)
440 else:471 else:
441 abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))472 abspath = os.path.abspath(os.path.join(Context.g_module.top, directory))
442 ctxclass = ctx.__class__.__name__473 ctxclass = ctx.__class__.__name__
443 key = ctxclass + ':' + abspath474 key = ctxclass + ':' + abspath
444 if key in visited_dirs:475 if key in visited_dirs:
445 # already done it476 # already done it
446 return477 return
447 visited_dirs.add(key)478 visited_dirs.add(key)
448 relpath = os_path_relpath(abspath, ctx.curdir)479 relpath = os_path_relpath(abspath, ctx.path.abspath())
449 if ctxclass == 'Handler':480 if ctxclass in ['tmp', 'OptionsContext', 'ConfigurationContext', 'BuildContext']:
450 return ctx.sub_options(relpath)481 return ctx.recurse(relpath)
451 if ctxclass == 'ConfigurationContext':482 if 'waflib.extras.compat15' in sys.modules:
452 return ctx.sub_config(relpath)483 return ctx.recurse(relpath)
453 if ctxclass == 'BuildContext':484 Logs.error('Unknown RECURSE context class: {}'.format(ctxclass))
454 return ctx.add_subdirs(relpath)
455 Logs.error('Unknown RECURSE context class', ctxclass)
456 raise485 raise
457Options.Handler.RECURSE = RECURSE486Options.OptionsContext.RECURSE = RECURSE
458Build.BuildContext.RECURSE = RECURSE487Build.BuildContext.RECURSE = RECURSE
459488
460489
461def CHECK_MAKEFLAGS(bld):490def CHECK_MAKEFLAGS(options):
462 '''check for MAKEFLAGS environment variable in case we are being491 '''check for MAKEFLAGS environment variable in case we are being
463 called from a Makefile try to honor a few make command line flags'''492 called from a Makefile try to honor a few make command line flags'''
464 if not 'WAF_MAKE' in os.environ:493 if not 'WAF_MAKE' in os.environ:
465 return494 return
466 makeflags = os.environ.get('MAKEFLAGS')495 makeflags = os.environ.get('MAKEFLAGS')
467 if makeflags is None:496 if makeflags is None:
468 return497 makeflags = ""
469 jobs_set = False498 jobs_set = False
499 jobs = None
470 # we need to use shlex.split to cope with the escaping of spaces500 # we need to use shlex.split to cope with the escaping of spaces
471 # in makeflags501 # in makeflags
472 for opt in shlex.split(makeflags):502 for opt in shlex.split(makeflags):
473 # options can come either as -x or as x503 # options can come either as -x or as x
474 if opt[0:2] == 'V=':504 if opt[0:2] == 'V=':
475 Options.options.verbose = Logs.verbose = int(opt[2:])505 options.verbose = Logs.verbose = int(opt[2:])
476 if Logs.verbose > 0:506 if Logs.verbose > 0:
477 Logs.zones = ['runner']507 Logs.zones = ['runner']
478 if Logs.verbose > 2:508 if Logs.verbose > 2:
@@ -486,22 +516,53 @@ def CHECK_MAKEFLAGS(bld):
486 # this is also how "make test TESTS=testpattern" works, and516 # this is also how "make test TESTS=testpattern" works, and
487 # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1"517 # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1"
488 loc = opt.find('=')518 loc = opt.find('=')
489 setattr(Options.options, opt[0:loc], opt[loc+1:])519 setattr(options, opt[0:loc], opt[loc+1:])
490 elif opt[0] != '-':520 elif opt[0] != '-':
491 for v in opt:521 for v in opt:
492 if v == 'j':522 if re.search(r'j[0-9]*$', v):
493 jobs_set = True523 jobs_set = True
524 jobs = opt.strip('j')
494 elif v == 'k':525 elif v == 'k':
495 Options.options.keep = True526 options.keep = True
496 elif opt == '-j':527 elif re.search(r'-j[0-9]*$', opt):
497 jobs_set = True528 jobs_set = True
529 jobs = opt.strip('-j')
498 elif opt == '-k':530 elif opt == '-k':
499 Options.options.keep = True531 options.keep = True
500 if not jobs_set:532 if not jobs_set:
501 # default to one job533 # default to one job
502 Options.options.jobs = 1534 options.jobs = 1
503535 elif jobs_set and jobs:
504Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS536 options.jobs = int(jobs)
537
538waflib_options_parse_cmd_args = Options.OptionsContext.parse_cmd_args
539def wafsamba_options_parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
540 (options, commands, envvars) = \
541 waflib_options_parse_cmd_args(self,
542 _args=_args,
543 cwd=cwd,
544 allow_unknown=allow_unknown)
545 CHECK_MAKEFLAGS(options)
546 if options.jobs == 1:
547 #
548 # waflib.Runner.Parallel processes jobs inline if the possible number
549 # of jobs is just 1. But (at least in waf <= 2.0.12) it still calls
550 # create a waflib.Runner.Spawner() which creates a single
551 # waflib.Runner.Consumer() thread that tries to process jobs from the
552 # queue.
553 #
554 # This has strange effects, which are not noticed typically,
555 # but at least on AIX python has broken threading and fails
556 # in random ways.
557 #
558 # So we just add a dummy Spawner class.
559 class NoOpSpawner(object):
560 def __init__(self, master):
561 return
562 from waflib import Runner
563 Runner.Spawner = NoOpSpawner
564 return options, commands, envvars
565Options.OptionsContext.parse_cmd_args = wafsamba_options_parse_cmd_args
505566
506option_groups = {}567option_groups = {}
507568
@@ -513,7 +574,7 @@ def option_group(opt, name):
513 gr = opt.add_option_group(name)574 gr = opt.add_option_group(name)
514 option_groups[name] = gr575 option_groups[name] = gr
515 return gr576 return gr
516Options.Handler.option_group = option_group577Options.OptionsContext.option_group = option_group
517578
518579
519def save_file(filename, contents, create_dir=False):580def save_file(filename, contents, create_dir=False):
@@ -542,9 +603,9 @@ def load_file(filename):
542603
543def reconfigure(ctx):604def reconfigure(ctx):
544 '''rerun configure if necessary'''605 '''rerun configure if necessary'''
545 import Configure, samba_wildcard, Scripting606 if not os.path.exists(os.environ.get('WAFLOCK', '.lock-wscript')):
546 if not os.path.exists(".lock-wscript"):607 raise Errors.WafError('configure has not been run')
547 raise Utils.WafError('configure has not been run')608 import samba_wildcard
548 bld = samba_wildcard.fake_build_environment()609 bld = samba_wildcard.fake_build_environment()
549 Configure.autoconfig = True610 Configure.autoconfig = True
550 Scripting.check_configured(bld)611 Scripting.check_configured(bld)
@@ -561,7 +622,7 @@ def map_shlib_extension(ctx, name, python=False):
561 if python:622 if python:
562 return ctx.env.pyext_PATTERN % root1623 return ctx.env.pyext_PATTERN % root1
563 else:624 else:
564 (root2, ext2) = os.path.splitext(ctx.env.shlib_PATTERN)625 (root2, ext2) = os.path.splitext(ctx.env.cshlib_PATTERN)
565 return root1+ext2626 return root1+ext2
566Build.BuildContext.map_shlib_extension = map_shlib_extension627Build.BuildContext.map_shlib_extension = map_shlib_extension
567628
@@ -583,7 +644,7 @@ def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
583 if python:644 if python:
584 libname = apply_pattern(name, ctx.env.pyext_PATTERN)645 libname = apply_pattern(name, ctx.env.pyext_PATTERN)
585 else:646 else:
586 libname = apply_pattern(name, ctx.env.shlib_PATTERN)647 libname = apply_pattern(name, ctx.env.cshlib_PATTERN)
587 if nolibprefix and libname[0:3] == 'lib':648 if nolibprefix and libname[0:3] == 'lib':
588 libname = libname[3:]649 libname = libname[3:]
589 if version:650 if version:
@@ -617,7 +678,7 @@ def get_tgt_list(bld):
617 tgt_list.append(t)678 tgt_list.append(t)
618 return tgt_list679 return tgt_list
619680
620from Constants import WSCRIPT_FILE681from waflib.Context import WSCRIPT_FILE
621def PROCESS_SEPARATE_RULE(self, rule):682def PROCESS_SEPARATE_RULE(self, rule):
622 ''' cause waf to process additional script based on `rule'.683 ''' cause waf to process additional script based on `rule'.
623 You should have file named wscript_<stage>_rule in the current directory684 You should have file named wscript_<stage>_rule in the current directory
@@ -628,15 +689,21 @@ def PROCESS_SEPARATE_RULE(self, rule):
628 stage = 'configure'689 stage = 'configure'
629 elif isinstance(self, Build.BuildContext):690 elif isinstance(self, Build.BuildContext):
630 stage = 'build'691 stage = 'build'
631 file_path = os.path.join(self.curdir, WSCRIPT_FILE+'_'+stage+'_'+rule)692 file_path = os.path.join(self.path.abspath(), WSCRIPT_FILE+'_'+stage+'_'+rule)
632 txt = load_file(file_path)693 node = self.root.find_node(file_path)
633 if txt:694 if node:
634 dc = {'ctx': self}695 try:
635 if getattr(self.__class__, 'pre_recurse', None):696 cache = self.recurse_cache
636 dc = self.pre_recurse(txt, file_path, self.curdir)697 except AttributeError:
637 exec(compile(txt, file_path, 'exec'), dc)698 cache = self.recurse_cache = {}
638 if getattr(self.__class__, 'post_recurse', None):699 if node not in cache:
639 dc = self.post_recurse(txt, file_path, self.curdir)700 cache[node] = True
701 self.pre_recurse(node)
702 try:
703 function_code = node.read('rU', None)
704 exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
705 finally:
706 self.post_recurse(node)
640707
641Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE708Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
642ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE709ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
@@ -693,4 +760,4 @@ def samba_add_onoff_option(opt, option, help=(), dest=None, default=True,
693 default=default)760 default=default)
694 opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false",761 opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false",
695 dest=dest)762 dest=dest)
696Options.Handler.samba_add_onoff_option = samba_add_onoff_option763Options.OptionsContext.samba_add_onoff_option = samba_add_onoff_option
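The reworked CHECK_MAKEFLAGS above accepts both the '-jN' and the bare 'jN' forms that make passes through MAKEFLAGS, and falls back to one job when nothing matches. A simplified sketch of just the job-count handling (it ignores the other flags and leaves a bare -j at the default, so it does not reproduce waf's exact behaviour):

import re, shlex

def jobs_from_makeflags(makeflags):
    jobs = 1                                   # default to one job
    for opt in shlex.split(makeflags or ""):
        if re.search(r'^-?j[0-9]*$', opt):
            digits = opt.lstrip('-j')
            if digits:
                jobs = int(digits)
    return jobs

print(jobs_from_makeflags("-j4 k"))   # 4
print(jobs_from_makeflags(""))        # 1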
diff --git a/buildtools/wafsamba/samba_version.py b/buildtools/wafsamba/samba_version.py
index be26439..f0e7b4d 100644
--- a/buildtools/wafsamba/samba_version.py
+++ b/buildtools/wafsamba/samba_version.py
@@ -1,5 +1,5 @@
-import os
-import Utils
+import os, sys
+from waflib import Utils, Context
 import samba_utils
 from samba_git import find_git
 
@@ -14,7 +14,7 @@ def git_version_summary(path, env=None):
     environ = dict(os.environ)
     environ["GIT_DIR"] = '%s/.git' % path
     environ["GIT_WORK_TREE"] = path
-    git = Utils.cmd_output(env.GIT + ' show --pretty=format:"%h%n%ct%n%H%n%cd" --stat HEAD', silent=True, env=environ)
+    git = samba_utils.get_string(Utils.cmd_output(env.GIT + ' show --pretty=format:"%h%n%ct%n%H%n%cd" --stat HEAD', silent=True, env=environ))
 
     lines = git.splitlines()
     if not lines or len(lines) < 4:
@@ -198,7 +198,10 @@ also accepted as dictionary entries here
         for name in sorted(self.vcs_fields.keys()):
             string+="#define SAMBA_VERSION_%s " % name
             value = self.vcs_fields[name]
-            if isinstance(value, basestring):
+            string_types = str
+            if sys.version_info[0] < 3:
+                string_types = basestring
+            if isinstance(value, string_types):
                 string += "\"%s\"" % value
             elif type(value) is int:
                 string += "%d" % value
@@ -260,5 +263,5 @@ def load_version(env=None, is_install=True):
         env = samba_utils.LOAD_ENVIRONMENT()
 
     version = samba_version_file("./VERSION", ".", env, is_install=is_install)
-    Utils.g_module.VERSION = version.STRING
+    Context.g_module.VERSION = version.STRING
     return version
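The basestring reference only exists on Python 2, so the version writer now picks the string type at runtime before deciding whether to quote a value. Reduced to its essentials, with illustrative field names and values:

import sys

string_types = str
if sys.version_info[0] < 3:
    string_types = basestring          # only defined on Python 2

def format_define(name, value):
    # quote strings, print integers bare
    if isinstance(value, string_types):
        return '#define SAMBA_VERSION_%s "%s"' % (name, value)
    return '#define SAMBA_VERSION_%s %d' % (name, value)

print(format_define("GIT_COMMIT_ABBREV", "abc1234"))
print(format_define("MAJOR", 1))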
diff --git a/buildtools/wafsamba/samba_waf18.py b/buildtools/wafsamba/samba_waf18.py
265new file mode 100644268new file mode 100644
index 0000000..cc310fb
--- /dev/null
+++ b/buildtools/wafsamba/samba_waf18.py
@@ -0,0 +1,429 @@
1# compatibility layer for building with more recent waf versions
2
3import os, shlex, sys
4from waflib import Build, Configure, Node, Utils, Options, Logs, TaskGen
5from waflib import ConfigSet
6from waflib.TaskGen import feature, after
7from waflib.Configure import conf, ConfigurationContext
8
9from waflib.Tools.flex import decide_ext
10
11# This version of flexfun runs in tsk.get_cwd() as opposed to the
12# bld.variant_dir: since input paths adjusted against tsk.get_cwd(), we have to
13# use tsk.get_cwd() for the work directory as well.
14def flexfun(tsk):
15 env = tsk.env
16 bld = tsk.generator.bld
17 def to_list(xx):
18 if isinstance(xx, str):
19 return [xx]
20 return xx
21 tsk.last_cmd = lst = []
22 lst.extend(to_list(env.FLEX))
23 lst.extend(to_list(env.FLEXFLAGS))
24 inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
25 if env.FLEX_MSYS:
26 inputs = [x.replace(os.sep, '/') for x in inputs]
27 lst.extend(inputs)
28 lst = [x for x in lst if x]
29 txt = bld.cmd_and_log(lst, cwd=tsk.get_cwd(), env=env.env or None, quiet=0)
30 tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207
31
32TaskGen.declare_chain(
33 name = 'flex',
34 rule = flexfun, # issue #854
35 ext_in = '.l',
36 decider = decide_ext,
37)
38
39
40for y in (Build.BuildContext, Build.CleanContext, Build.InstallContext, Build.UninstallContext, Build.ListContext):
41 class tmp(y):
42 variant = 'default'
43
44def abspath(self, env=None):
45 if env and hasattr(self, 'children'):
46 return self.get_bld().abspath()
47 return self.old_abspath()
48Node.Node.old_abspath = Node.Node.abspath
49Node.Node.abspath = abspath
50
51def bldpath(self, env=None):
52 return self.abspath()
53 #return self.path_from(self.ctx.bldnode.parent)
54Node.Node.bldpath = bldpath
55
56def srcpath(self, env=None):
57 return self.abspath()
58 #return self.path_from(self.ctx.bldnode.parent)
59Node.Node.srcpath = srcpath
60
61def store_fast(self, filename):
62 file = open(filename, 'wb')
63 data = self.get_merged_dict()
64 try:
65 Build.cPickle.dump(data, file, -1)
66 finally:
67 file.close()
68ConfigSet.ConfigSet.store_fast = store_fast
69
70def load_fast(self, filename):
71 file = open(filename, 'rb')
72 try:
73 data = Build.cPickle.load(file)
74 finally:
75 file.close()
76 self.table.update(data)
77ConfigSet.ConfigSet.load_fast = load_fast
78
79@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
80@after('propagate_uselib_vars', 'process_source')
81def apply_incpaths(self):
82 lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
83 self.includes_nodes = lst
84 cwdx = getattr(self.bld, 'cwdx', self.bld.bldnode)
85 self.env['INCPATHS'] = [x.path_from(cwdx) for x in lst]
86
87@conf
88def define(self, key, val, quote=True, comment=None):
89 assert key and isinstance(key, str)
90
91 if val is None:
92 val = ()
93 elif isinstance(val, bool):
94 val = int(val)
95
96 # waf 1.5
97 self.env[key] = val
98
99 if isinstance(val, int) or isinstance(val, float):
100 s = '%s=%s'
101 else:
102 s = quote and '%s="%s"' or '%s=%s'
103 app = s % (key, str(val))
104
105 ban = key + '='
106 lst = self.env.DEFINES
107 for x in lst:
108 if x.startswith(ban):
109 lst[lst.index(x)] = app
110 break
111 else:
112 self.env.append_value('DEFINES', app)
113
114 self.env.append_unique('define_key', key)
115
116# compat15 removes this but we want to keep it
117@conf
118def undefine(self, key, from_env=True, comment=None):
119 assert key and isinstance(key, str)
120
121 ban = key + '='
122 self.env.DEFINES = [x for x in self.env.DEFINES if not x.startswith(ban)]
123 self.env.append_unique('define_key', key)
124 # waf 1.5
125 if from_env:
126 self.env[key] = ()
127
128class ConfigurationContext(Configure.ConfigurationContext):
129 def init_dirs(self):
130 self.setenv('default')
131 self.env.merge_config_header = True
132 return super(ConfigurationContext, self).init_dirs()
133
134def find_program_samba(self, *k, **kw):
135 kw['mandatory'] = False
136 ret = self.find_program_old(*k, **kw)
137 return ret
138Configure.ConfigurationContext.find_program_old = Configure.ConfigurationContext.find_program
139Configure.ConfigurationContext.find_program = find_program_samba
140
141Build.BuildContext.ENFORCE_GROUP_ORDERING = Utils.nada
142Build.BuildContext.AUTOCLEANUP_STALE_FILES = Utils.nada
143
144@conf
145def check(self, *k, **kw):
146 '''Override the waf defaults to inject --with-directory options'''
147
148    # match the configuration test with specific options, for example:
149 # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"
150 self.validate_c(kw)
151
152 additional_dirs = []
153 if 'msg' in kw:
154 msg = kw['msg']
155 for x in Options.OptionsContext.parser.parser.option_list:
156 if getattr(x, 'match', None) and msg in x.match:
157 d = getattr(Options.options, x.dest, '')
158 if d:
159 additional_dirs.append(d)
160
161    # we add the additional dirs twice: once for the test data, and again if the compilation test succeeds below
162 def add_options_dir(dirs, env):
163 for x in dirs:
164 if not x in env.CPPPATH:
165 env.CPPPATH = [os.path.join(x, 'include')] + env.CPPPATH
166 if not x in env.LIBPATH:
167 env.LIBPATH = [os.path.join(x, 'lib')] + env.LIBPATH
168
169 add_options_dir(additional_dirs, kw['env'])
170
171 self.start_msg(kw['msg'], **kw)
172 ret = None
173 try:
174 ret = self.run_build(*k, **kw)
175 except self.errors.ConfigurationError:
176 self.end_msg(kw['errmsg'], 'YELLOW', **kw)
177 if Logs.verbose > 1:
178 raise
179 else:
180 self.fatal('The configuration failed')
181 else:
182 kw['success'] = ret
183 # success! time for brandy
184 add_options_dir(additional_dirs, self.env)
185
186 ret = self.post_check(*k, **kw)
187 if not ret:
188 self.end_msg(kw['errmsg'], 'YELLOW', **kw)
189 self.fatal('The configuration failed %r' % ret)
190 else:
191 self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
192 return ret
193
194@conf
195def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None):
196 '''see if the platform supports building libraries'''
197
198 if msg is None:
199 if rpath:
200 msg = "rpath library support"
201 else:
202 msg = "building library support"
203
204 def build(bld):
205 lib_node = bld.srcnode.make_node('libdir/liblc1.c')
206 lib_node.parent.mkdir()
207 lib_node.write('int lib_func(void) { return 42; }\n', 'w')
208 main_node = bld.srcnode.make_node('main.c')
209 main_node.write('int main(void) {return !(lib_func() == 42);}', 'w')
210 linkflags = []
211 if version_script:
212 script = bld.srcnode.make_node('ldscript')
213 script.write('TEST_1.0A2 { global: *; };\n', 'w')
214 linkflags.append('-Wl,--version-script=%s' % script.abspath())
215 bld(features='c cshlib', source=lib_node, target='lib1', linkflags=linkflags, name='lib1')
216 o = bld(features='c cprogram', source=main_node, target='prog1', uselib_local='lib1')
217 if rpath:
218 o.rpath = [lib_node.parent.abspath()]
219 def run_app(self):
220 args = conf.SAMBA_CROSS_ARGS(msg=msg)
221 env = dict(os.environ)
222 env['LD_LIBRARY_PATH'] = self.inputs[0].parent.abspath() + os.pathsep + env.get('LD_LIBRARY_PATH', '')
223 self.generator.bld.cmd_and_log([self.inputs[0].abspath()] + args, env=env)
224 o.post()
225 bld(rule=run_app, source=o.link_task.outputs[0])
226
227 # ok, so it builds
228 try:
229 conf.check(build_fun=build, msg='Checking for %s' % msg)
230 except conf.errors.ConfigurationError:
231 return False
232 return True
233
234@conf
235def CHECK_NEED_LC(conf, msg):
236 '''check if we need -lc'''
237 def build(bld):
238 lib_node = bld.srcnode.make_node('libdir/liblc1.c')
239 lib_node.parent.mkdir()
240 lib_node.write('#include <stdio.h>\nint lib_func(void) { FILE *f = fopen("foo", "r");}\n', 'w')
241 bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
242 try:
243 conf.check(build_fun=build, msg=msg, okmsg='-lc is unnecessary', errmsg='-lc is necessary')
244 except conf.errors.ConfigurationError:
245 return False
246 return True
247
248# already implemented on "waf -v"
249def order(bld, tgt_list):
250 return True
251Build.BuildContext.check_group_ordering = order
252
253@conf
254def CHECK_CFG(self, *k, **kw):
255 if 'args' in kw:
256 kw['args'] = shlex.split(kw['args'])
257 if not 'mandatory' in kw:
258 kw['mandatory'] = False
259 kw['global_define'] = True
260 return self.check_cfg(*k, **kw)
261
262def cmd_output(cmd, **kw):
263
264 silent = False
265 if 'silent' in kw:
266 silent = kw['silent']
267 del(kw['silent'])
268
269 if 'e' in kw:
270 tmp = kw['e']
271 del(kw['e'])
272 kw['env'] = tmp
273
274 kw['shell'] = isinstance(cmd, str)
275 kw['stdout'] = Utils.subprocess.PIPE
276 if silent:
277 kw['stderr'] = Utils.subprocess.PIPE
278
279 try:
280 p = Utils.subprocess.Popen(cmd, **kw)
281 output = p.communicate()[0]
282 except OSError as e:
283 raise ValueError(str(e))
284
285 if p.returncode:
286 if not silent:
287 msg = "command execution failed: %s -> %r" % (cmd, str(output))
288 raise ValueError(msg)
289 output = ''
290 return output
291Utils.cmd_output = cmd_output
292
293
294@TaskGen.feature('c', 'cxx', 'd')
295@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
296@TaskGen.after('apply_link', 'process_source')
297def apply_uselib_local(self):
298 """
299 process the uselib_local attribute
300 execute after apply_link because of the execution order set on 'link_task'
301 """
302 env = self.env
303 from waflib.Tools.ccroot import stlink_task
304
305 # 1. the case of the libs defined in the project (visit ancestors first)
306 # the ancestors external libraries (uselib) will be prepended
307 self.uselib = self.to_list(getattr(self, 'uselib', []))
308 self.includes = self.to_list(getattr(self, 'includes', []))
309 names = self.to_list(getattr(self, 'uselib_local', []))
310 get = self.bld.get_tgen_by_name
311 seen = set()
312 seen_uselib = set()
313 tmp = Utils.deque(names) # consume a copy of the list of names
314 if tmp:
315 if Logs.verbose:
316 Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
317 while tmp:
318 lib_name = tmp.popleft()
319 # visit dependencies only once
320 if lib_name in seen:
321 continue
322
323 y = get(lib_name)
324 y.post()
325 seen.add(lib_name)
326
327 # object has ancestors to process (shared libraries): add them to the end of the list
328 if getattr(y, 'uselib_local', None):
329 for x in self.to_list(getattr(y, 'uselib_local', [])):
330 obj = get(x)
331 obj.post()
332 if getattr(obj, 'link_task', None):
333 if not isinstance(obj.link_task, stlink_task):
334 tmp.append(x)
335
336 # link task and flags
337 if getattr(y, 'link_task', None):
338
339 link_name = y.target[y.target.rfind(os.sep) + 1:]
340 if isinstance(y.link_task, stlink_task):
341 env.append_value('STLIB', [link_name])
342 else:
343 # some linkers can link against programs
344 env.append_value('LIB', [link_name])
345
346 # the order
347 self.link_task.set_run_after(y.link_task)
348
349 # for the recompilation
350 self.link_task.dep_nodes += y.link_task.outputs
351
352 # add the link path too
353 tmp_path = y.link_task.outputs[0].parent.bldpath()
354 if not tmp_path in env['LIBPATH']:
355 env.prepend_value('LIBPATH', [tmp_path])
356
357 # add ancestors uselib too - but only propagate those that have no staticlib defined
358 for v in self.to_list(getattr(y, 'uselib', [])):
359 if v not in seen_uselib:
360 seen_uselib.add(v)
361 if not env['STLIB_' + v]:
362 if not v in self.uselib:
363 self.uselib.insert(0, v)
364
365 # if the library task generator provides 'export_includes', add to the include path
366 # the export_includes must be a list of paths relative to the other library
367 if getattr(y, 'export_includes', None):
368 self.includes.extend(y.to_incnodes(y.export_includes))
369
370@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
371@TaskGen.after('apply_link')
372def apply_objdeps(self):
373 "add the .o files produced by some other object files in the same manner as uselib_local"
374 names = getattr(self, 'add_objects', [])
375 if not names:
376 return
377 names = self.to_list(names)
378
379 get = self.bld.get_tgen_by_name
380 seen = []
381 while names:
382 x = names[0]
383
384 # visit dependencies only once
385 if x in seen:
386 names = names[1:]
387 continue
388
389 # object does not exist ?
390 y = get(x)
391
392 # object has ancestors to process first ? update the list of names
393 if getattr(y, 'add_objects', None):
394 added = 0
395 lst = y.to_list(y.add_objects)
396 lst.reverse()
397 for u in lst:
398 if u in seen:
399 continue
400 added = 1
401 names = [u]+names
402 if added:
403 continue # list of names modified, loop
404
405 # safe to process the current object
406 y.post()
407 seen.append(x)
408
409 for t in getattr(y, 'compiled_tasks', []):
410 self.link_task.inputs.extend(t.outputs)
411
412@TaskGen.after('apply_link')
413def process_obj_files(self):
414 if not hasattr(self, 'obj_files'):
415 return
416 for x in self.obj_files:
417 node = self.path.find_resource(x)
418 self.link_task.inputs.append(node)
419
420@TaskGen.taskgen_method
421def add_obj_file(self, file):
422 """Small example on how to link object files as if they were source
423 obj = bld.create_obj('cc')
424 obj.add_obj_file('foo.o')"""
425 if not hasattr(self, 'obj_files'):
426 self.obj_files = []
427 if not 'process_obj_files' in self.meths:
428 self.meths.append('process_obj_files')
429 self.obj_files.append(file)
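
The compat helpers above keep the old waf 1.5 attributes (uselib_local, add_objects) working under the new waf: dependencies are drained from a deque, each task generator is posted once, and its link outputs, library paths and exported includes are folded into the current generator. A stand-alone sketch of that traversal pattern, using hypothetical Dep records and a get() lookup rather than the waf API:

    from collections import deque

    # Hypothetical dependency record standing in for a waf task generator.
    class Dep:
        def __init__(self, name, uselib_local=(), is_static=False):
            self.name = name
            self.uselib_local = list(uselib_local)
            self.is_static = is_static

    def collect_link_deps(root, get):
        """Walk uselib_local-style dependencies, visiting each only once."""
        seen = set()
        stlibs, shlibs = [], []
        todo = deque(root.uselib_local)
        while todo:
            name = todo.popleft()
            if name in seen:                 # visit every dependency only once
                continue
            seen.add(name)
            dep = get(name)
            (stlibs if dep.is_static else shlibs).append(dep.name)
            # ancestors have to be walked too, so queue them at the end
            todo.extend(n for n in dep.uselib_local if n not in seen)
        return stlibs, shlibs

    # usage
    libs = {
        'app':  Dep('app', uselib_local=['util']),
        'util': Dep('util', uselib_local=['base'], is_static=True),
        'base': Dep('base'),
    }
    print(collect_link_deps(libs['app'], libs.get))   # (['util'], ['base'])
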
diff --git a/buildtools/wafsamba/samba_wildcard.py b/buildtools/wafsamba/samba_wildcard.py
index ed3e0c2..6173ce8 100644
--- a/buildtools/wafsamba/samba_wildcard.py
+++ b/buildtools/wafsamba/samba_wildcard.py
@@ -1,15 +1,15 @@
1# based on playground/evil in the waf svn tree1# based on playground/evil in the waf svn tree
22
3import os, datetime, fnmatch3import os, datetime, fnmatch
4import Scripting, Utils, Options, Logs, Environment4from waflib import Scripting, Utils, Options, Logs, Errors
5from Constants import SRCDIR, BLDDIR5from waflib import ConfigSet, Context
6from samba_utils import LOCAL_CACHE, os_path_relpath6from samba_utils import LOCAL_CACHE, os_path_relpath
77
8def run_task(t, k):8def run_task(t, k):
9 '''run a single build task'''9 '''run a single build task'''
10 ret = t.run()10 ret = t.run()
11 if ret:11 if ret:
12 raise Utils.WafError("Failed to build %s: %u" % (k, ret))12 raise Errors.WafError("Failed to build %s: %u" % (k, ret))
1313
1414
15def run_named_build_task(cmd):15def run_named_build_task(cmd):
@@ -45,7 +45,7 @@ def run_named_build_task(cmd):
4545
4646
47 if not found:47 if not found:
48 raise Utils.WafError("Unable to find build target matching %s" % cmd)48 raise Errors.WafError("Unable to find build target matching %s" % cmd)
4949
5050
51def rewrite_compile_targets():51def rewrite_compile_targets():
@@ -125,7 +125,7 @@ def wildcard_main(missing_cmd_fn):
125def fake_build_environment(info=True, flush=False):125def fake_build_environment(info=True, flush=False):
126 """create all the tasks for the project, but do not run the build126 """create all the tasks for the project, but do not run the build
127 return the build context in use"""127 return the build context in use"""
128 bld = getattr(Utils.g_module, 'build_context', Utils.Context)()128 bld = getattr(Context.g_module, 'build_context', Utils.Context)()
129 bld = Scripting.check_configured(bld)129 bld = Scripting.check_configured(bld)
130130
131 Options.commands['install'] = False131 Options.commands['install'] = False
@@ -134,16 +134,15 @@ def fake_build_environment(info=True, flush=False):
134 bld.is_install = 0 # False134 bld.is_install = 0 # False
135135
136 try:136 try:
137 proj = Environment.Environment(Options.lockfile)137 proj = ConfigSet.ConfigSet(Options.lockfile)
138 except IOError:138 except IOError:
139 raise Utils.WafError("Project not configured (run 'waf configure' first)")139 raise Errors.WafError("Project not configured (run 'waf configure' first)")
140140
141 bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
142 bld.load_envs()141 bld.load_envs()
143142
144 if info:143 if info:
145 Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath())144 Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
146 bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])145 bld.add_subdirs([os.path.split(Context.g_module.root_path)[0]])
147146
148 bld.pre_build()147 bld.pre_build()
149 if flush:148 if flush:
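
samba_wildcard.py is ported from the flat waf 1.5 module layout to the waflib package: Utils.WafError becomes Errors.WafError, Environment.Environment becomes ConfigSet.ConfigSet, and module globals move from Utils.g_module to Context.g_module. The patch switches to the new names directly; purely as an illustration, a transitional shim could bridge both layouts (this assumes a waf checkout on sys.path and is not what the patch does):

    try:                                     # waf >= 1.8: modules live in waflib
        from waflib import Errors, ConfigSet
        WafError, LockFile = Errors.WafError, ConfigSet.ConfigSet
    except ImportError:                      # old flat waf 1.5 layout
        import Utils, Environment
        WafError, LockFile = Utils.WafError, Environment.Environment

    def read_lockfile(path):
        """Load a configured project's settings regardless of waf generation."""
        try:
            return LockFile(path)
        except IOError:
            raise WafError("Project not configured (run 'waf configure' first)")
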
diff --git a/buildtools/wafsamba/stale_files.py b/buildtools/wafsamba/stale_files.py
index 2dd08e1..175f573 100644
--- a/buildtools/wafsamba/stale_files.py
+++ b/buildtools/wafsamba/stale_files.py
@@ -14,7 +14,9 @@ nodes/tasks, in which case the method will have to be modified
14to exclude some folders for example.14to exclude some folders for example.
15"""15"""
1616
17import Logs, Build, os, samba_utils, Options, Utils17from waflib import Logs, Build, Options, Utils, Errors
18import os
19from wafsamba import samba_utils
18from Runner import Parallel20from Runner import Parallel
1921
20old_refill_task_list = Parallel.refill_task_list22old_refill_task_list = Parallel.refill_task_list
@@ -46,7 +48,7 @@ def replace_refill_task_list(self):
4648
47 # paranoia49 # paranoia
48 if bin_base[-4:] != '/bin':50 if bin_base[-4:] != '/bin':
49 raise Utils.WafError("Invalid bin base: %s" % bin_base)51 raise Errors.WafError("Invalid bin base: %s" % bin_base)
5052
51 # obtain the expected list of files53 # obtain the expected list of files
52 expected = []54 expected = []
diff --git a/buildtools/wafsamba/symbols.py b/buildtools/wafsamba/symbols.py
index 7ff4bac..3eca3d4 100644
--- a/buildtools/wafsamba/symbols.py
+++ b/buildtools/wafsamba/symbols.py
@@ -2,8 +2,8 @@
2# using nm, producing a set of exposed defined/undefined symbols2# using nm, producing a set of exposed defined/undefined symbols
33
4import os, re, subprocess4import os, re, subprocess
5import Utils, Build, Options, Logs5from waflib import Utils, Build, Options, Logs, Errors
6from Logs import debug6from waflib.Logs import debug
7from samba_utils import TO_LIST, LOCAL_CACHE, get_tgt_list, os_path_relpath7from samba_utils import TO_LIST, LOCAL_CACHE, get_tgt_list, os_path_relpath
88
9# these are the data structures used in symbols.py:9# these are the data structures used in symbols.py:
@@ -59,12 +59,12 @@ def symbols_extract(bld, objfiles, dynamic=False):
5959
60 for line in nmpipe:60 for line in nmpipe:
61 line = line.strip()61 line = line.strip()
62 if line.endswith(':'):62 if line.endswith(b':'):
63 filename = line[:-1]63 filename = line[:-1]
64 ret[filename] = { "PUBLIC": set(), "UNDEFINED" : set() }64 ret[filename] = { "PUBLIC": set(), "UNDEFINED" : set() }
65 continue65 continue
66 cols = line.split(" ")66 cols = line.split(b" ")
67 if cols == ['']:67 if cols == [b'']:
68 continue68 continue
69 # see if the line starts with an address69 # see if the line starts with an address
70 if len(cols) == 3:70 if len(cols) == 3:
@@ -73,10 +73,10 @@ def symbols_extract(bld, objfiles, dynamic=False):
73 else:73 else:
74 symbol_type = cols[0]74 symbol_type = cols[0]
75 symbol = cols[1]75 symbol = cols[1]
76 if symbol_type in "BDGTRVWSi":76 if symbol_type in b"BDGTRVWSi":
77 # its a public symbol77 # its a public symbol
78 ret[filename]["PUBLIC"].add(symbol)78 ret[filename]["PUBLIC"].add(symbol)
79 elif symbol_type in "U":79 elif symbol_type in b"U":
80 ret[filename]["UNDEFINED"].add(symbol)80 ret[filename]["UNDEFINED"].add(symbol)
8181
82 # add to the cache82 # add to the cache
@@ -106,10 +106,10 @@ def find_ldd_path(bld, libname, binary):
106 lddpipe = subprocess.Popen(['ldd', binary], stdout=subprocess.PIPE).stdout106 lddpipe = subprocess.Popen(['ldd', binary], stdout=subprocess.PIPE).stdout
107 for line in lddpipe:107 for line in lddpipe:
108 line = line.strip()108 line = line.strip()
109 cols = line.split(" ")109 cols = line.split(b" ")
110 if len(cols) < 3 or cols[1] != "=>":110 if len(cols) < 3 or cols[1] != b"=>":
111 continue111 continue
112 if cols[0].startswith("libc."):112 if cols[0].startswith(b"libc."):
113 # save this one too113 # save this one too
114 bld.env.libc_path = cols[2]114 bld.env.libc_path = cols[2]
115 if cols[0].startswith(libname):115 if cols[0].startswith(libname):
@@ -119,8 +119,9 @@ def find_ldd_path(bld, libname, binary):
119119
120120
121# some regular expressions for parsing readelf output121# some regular expressions for parsing readelf output
122re_sharedlib = re.compile('Shared library: \[(.*)\]')122re_sharedlib = re.compile(b'Shared library: \[(.*)\]')
123re_rpath = re.compile('Library rpath: \[(.*)\]')123# output from readelf could be `Library rpath` or `Library runpath`
124re_rpath = re.compile(b'Library (rpath|runpath): \[(.*)\]')
124125
125def get_libs(bld, binname):126def get_libs(bld, binname):
126 '''find the list of linked libraries for any binary or library127 '''find the list of linked libraries for any binary or library
@@ -146,7 +147,8 @@ def get_libs(bld, binname):
146 libs.add(m.group(1))147 libs.add(m.group(1))
147 m = re_rpath.search(line)148 m = re_rpath.search(line)
148 if m:149 if m:
149 rpath.extend(m.group(1).split(":"))150 # output from Popen is always bytestr even in py3
151 rpath.extend(m.group(2).split(b":"))
150152
151 ret = set()153 ret = set()
152 for lib in libs:154 for lib in libs:
@@ -410,7 +412,7 @@ def check_library_deps(bld, t):
410 if dep2 == name and t.in_library != t2.in_library:412 if dep2 == name and t.in_library != t2.in_library:
411 Logs.warn("WARNING: mutual dependency %s <=> %s" % (name, real_name(t2.sname)))413 Logs.warn("WARNING: mutual dependency %s <=> %s" % (name, real_name(t2.sname)))
412 Logs.warn("Libraries should match. %s != %s" % (t.in_library, t2.in_library))414 Logs.warn("Libraries should match. %s != %s" % (t.in_library, t2.in_library))
413 # raise Utils.WafError("illegal mutual dependency")415 # raise Errors.WafError("illegal mutual dependency")
414416
415417
416def check_syslib_collisions(bld, tgt_list):418def check_syslib_collisions(bld, tgt_list):
@@ -430,7 +432,7 @@ def check_syslib_collisions(bld, tgt_list):
430 Logs.error("ERROR: Target '%s' has symbols '%s' which is also in syslib '%s'" % (t.sname, common, lib))432 Logs.error("ERROR: Target '%s' has symbols '%s' which is also in syslib '%s'" % (t.sname, common, lib))
431 has_error = True433 has_error = True
432 if has_error:434 if has_error:
433 raise Utils.WafError("symbols in common with system libraries")435 raise Errors.WafError("symbols in common with system libraries")
434436
435437
436def check_dependencies(bld, t):438def check_dependencies(bld, t):
@@ -546,7 +548,7 @@ def symbols_whyneeded(task):
546548
547 why = Options.options.WHYNEEDED.split(":")549 why = Options.options.WHYNEEDED.split(":")
548 if len(why) != 2:550 if len(why) != 2:
549 raise Utils.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")551 raise Errors.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
550 target = why[0]552 target = why[0]
551 subsystem = why[1]553 subsystem = why[1]
552554
@@ -579,7 +581,7 @@ def report_duplicate(bld, binname, sym, libs, fail_on_error):
579 else:581 else:
580 libnames.append(lib)582 libnames.append(lib)
581 if fail_on_error:583 if fail_on_error:
582 raise Utils.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))584 raise Errors.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
583 else:585 else:
584 print("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))586 print("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
585587
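
The symbols.py changes deal with Python 3: the stdout pipe from subprocess.Popen yields bytes, so the parser now compares against bytes literals (b':', b"=>", b"BDGTRVWSi"). An alternative, sketched below, is to decode once at the pipe boundary and keep the parsing in str; the patch instead stays in bytes throughout, which sidesteps decode errors on odd symbol names. This sketch only assumes nm is on PATH:

    import subprocess

    def nm_symbols(objfile):
        """Yield (symbol_type, name) pairs from `nm` output, decoding once."""
        proc = subprocess.Popen(['nm', objfile], stdout=subprocess.PIPE)
        for raw in proc.stdout:                        # raw is bytes under py3
            line = raw.decode('utf-8', 'replace').strip()
            if not line or line.endswith(':'):         # skip blanks and file headers
                continue
            cols = line.split(' ')
            if len(cols) == 3:                         # address, type, name
                yield cols[1], cols[2]
            elif len(cols) == 2:                       # undefined: type, name
                yield cols[0], cols[1]
        proc.stdout.close()
        proc.wait()

    # usage: public = [n for t, n in nm_symbols('foo.o') if t in 'BDGTRVWSi']
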
diff --git a/buildtools/wafsamba/test_duplicate_symbol.sh b/buildtools/wafsamba/test_duplicate_symbol.sh
index 89a4027..46f44a6 100755
--- a/buildtools/wafsamba/test_duplicate_symbol.sh
+++ b/buildtools/wafsamba/test_duplicate_symbol.sh
@@ -5,7 +5,7 @@
55
6subunit_start_test duplicate_symbols6subunit_start_test duplicate_symbols
77
8if ./buildtools/bin/waf build --dup-symbol-check; then8if $PYTHON ./buildtools/bin/waf build --dup-symbol-check; then
9 subunit_pass_test duplicate_symbols9 subunit_pass_test duplicate_symbols
10else10else
11 echo | subunit_fail_test duplicate_symbols11 echo | subunit_fail_test duplicate_symbols
diff --git a/buildtools/wafsamba/tests/test_abi.py b/buildtools/wafsamba/tests/test_abi.py
index bba78c1..d6bdb04 100644
--- a/buildtools/wafsamba/tests/test_abi.py
+++ b/buildtools/wafsamba/tests/test_abi.py
@@ -21,7 +21,7 @@ from wafsamba.samba_abi import (
21 normalise_signature,21 normalise_signature,
22 )22 )
2323
24from cStringIO import StringIO24from samba.compat import StringIO
2525
2626
27class NormaliseSignatureTests(TestCase):27class NormaliseSignatureTests(TestCase):
@@ -66,6 +66,10 @@ class WriteVscriptTests(TestCase):
661.0 {661.0 {
67\tglobal:67\tglobal:
68\t\t*;68\t\t*;
69\tlocal:
70\t\t_end;
71\t\t__bss_start;
72\t\t_edata;
69};73};
70""")74""")
7175
@@ -84,6 +88,10 @@ MYLIB_0.1 {
841.0 {881.0 {
85\tglobal:89\tglobal:
86\t\t*;90\t\t*;
91\tlocal:
92\t\t_end;
93\t\t__bss_start;
94\t\t_edata;
87};95};
88""")96""")
8997
@@ -99,6 +107,9 @@ MYLIB_0.1 {
99\t\t*;107\t\t*;
100\tlocal:108\tlocal:
101\t\texc_*;109\t\texc_*;
110\t\t_end;
111\t\t__bss_start;
112\t\t_edata;
102};113};
103""")114""")
104115
@@ -115,6 +126,9 @@ MYLIB_0.1 {
115\t\tpub_*;126\t\tpub_*;
116\tlocal:127\tlocal:
117\t\texc_*;128\t\texc_*;
129\t\t_end;
130\t\t__bss_start;
131\t\t_edata;
118\t\t*;132\t\t*;
119};133};
120""")134""")
diff --git a/buildtools/wafsamba/tru64cc.py b/buildtools/wafsamba/tru64cc.py
deleted file mode 100644
index e1bbb1d..0000000
--- a/buildtools/wafsamba/tru64cc.py
+++ /dev/null
@@ -1,77 +0,0 @@
1
2# compiler definition for tru64/OSF1 cc compiler
3# based on suncc.py from waf
4
5import os, optparse
6import Utils, Options, Configure
7import ccroot, ar
8from Configure import conftest
9
10from compiler_cc import c_compiler
11
12c_compiler['osf1V'] = ['gcc', 'tru64cc']
13
14@conftest
15def find_tru64cc(conf):
16 v = conf.env
17 cc = None
18 if v['CC']: cc = v['CC']
19 elif 'CC' in conf.environ: cc = conf.environ['CC']
20 if not cc: cc = conf.find_program('cc', var='CC')
21 if not cc: conf.fatal('tru64cc was not found')
22 cc = conf.cmd_to_list(cc)
23
24 try:
25 if not Utils.cmd_output(cc + ['-V']):
26 conf.fatal('tru64cc %r was not found' % cc)
27 except ValueError:
28 conf.fatal('tru64cc -V could not be executed')
29
30 v['CC'] = cc
31 v['CC_NAME'] = 'tru64'
32
33@conftest
34def tru64cc_common_flags(conf):
35 v = conf.env
36
37 v['CC_SRC_F'] = ''
38 v['CC_TGT_F'] = ['-c', '-o', '']
39 v['CPPPATH_ST'] = '-I%s' # template for adding include paths
40
41 # linker
42 if not v['LINK_CC']: v['LINK_CC'] = v['CC']
43 v['CCLNK_SRC_F'] = ''
44 v['CCLNK_TGT_F'] = ['-o', '']
45
46 v['LIB_ST'] = '-l%s' # template for adding libs
47 v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
48 v['STATICLIB_ST'] = '-l%s'
49 v['STATICLIBPATH_ST'] = '-L%s'
50 v['CCDEFINES_ST'] = '-D%s'
51
52# v['SONAME_ST'] = '-Wl,-h -Wl,%s'
53# v['SHLIB_MARKER'] = '-Bdynamic'
54# v['STATICLIB_MARKER'] = '-Bstatic'
55
56 # program
57 v['program_PATTERN'] = '%s'
58
59 # shared library
60# v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
61 v['shlib_LINKFLAGS'] = ['-shared']
62 v['shlib_PATTERN'] = 'lib%s.so'
63
64 # static lib
65# v['staticlib_LINKFLAGS'] = ['-Bstatic']
66# v['staticlib_PATTERN'] = 'lib%s.a'
67
68detect = '''
69find_tru64cc
70find_cpp
71find_ar
72tru64cc_common_flags
73cc_load_tools
74cc_add_flags
75link_add_flags
76'''
77
diff --git a/buildtools/wafsamba/wafsamba.py b/buildtools/wafsamba/wafsamba.py
index 12d5421..1b98e1c 100644
--- a/buildtools/wafsamba/wafsamba.py
+++ b/buildtools/wafsamba/wafsamba.py
@@ -1,15 +1,16 @@
1# a waf tool to add autoconf-like macros to the configure section1# a waf tool to add autoconf-like macros to the configure section
2# and for SAMBA_ macros for building libraries, binaries etc2# and for SAMBA_ macros for building libraries, binaries etc
33
4import Build, os, sys, Options, Task, Utils, cc, TaskGen, fnmatch, re, shutil, Logs, Constants4import os, sys, re, shutil, fnmatch
5from Configure import conf5from waflib import Build, Options, Task, Utils, TaskGen, Logs, Context, Errors
6from Logs import debug6from waflib.Configure import conf
7from waflib.Logs import debug
7from samba_utils import SUBST_VARS_RECURSIVE8from samba_utils import SUBST_VARS_RECURSIVE
8TaskGen.task_gen.apply_verif = Utils.nada9TaskGen.task_gen.apply_verif = Utils.nada
910
10# bring in the other samba modules11# bring in the other samba modules
11from samba_optimisation import *
12from samba_utils import *12from samba_utils import *
13from samba_utils import symlink
13from samba_version import *14from samba_version import *
14from samba_autoconf import *15from samba_autoconf import *
15from samba_patterns import *16from samba_patterns import *
@@ -25,27 +26,19 @@ import samba_install
25import samba_conftests26import samba_conftests
26import samba_abi27import samba_abi
27import samba_headers28import samba_headers
28import tru64cc
29import irixcc
30import hpuxcc
31import generic_cc29import generic_cc
32import samba_dist30import samba_dist
33import samba_wildcard31import samba_wildcard
34import stale_files
35import symbols32import symbols
36import pkgconfig33import pkgconfig
37import configure_file34import configure_file
3835import samba_waf18
39# some systems have broken threading in python
40if os.environ.get('WAF_NOTHREADS') == '1':
41 import nothreads
4236
43LIB_PATH="shared"37LIB_PATH="shared"
4438
45os.environ['PYTHONUNBUFFERED'] = '1'39os.environ['PYTHONUNBUFFERED'] = '1'
4640
4741if Context.HEXVERSION not in (0x2000800,):
48if Constants.HEXVERSION < 0x105019:
49 Logs.error('''42 Logs.error('''
50Please use the version of waf that comes with Samba, not43Please use the version of waf that comes with Samba, not
51a system installed version. See http://wiki.samba.org/index.php/Waf44a system installed version. See http://wiki.samba.org/index.php/Waf
@@ -55,26 +48,25 @@ Alternatively, please run ./configure and make as usual. That will
55call the right version of waf.''')48call the right version of waf.''')
56 sys.exit(1)49 sys.exit(1)
5750
58
59@conf51@conf
60def SAMBA_BUILD_ENV(conf):52def SAMBA_BUILD_ENV(conf):
61 '''create the samba build environment'''53 '''create the samba build environment'''
62 conf.env.BUILD_DIRECTORY = conf.blddir54 conf.env.BUILD_DIRECTORY = conf.bldnode.abspath()
63 mkdir_p(os.path.join(conf.blddir, LIB_PATH))55 mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH))
64 mkdir_p(os.path.join(conf.blddir, LIB_PATH, "private"))56 mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH, "private"))
65 mkdir_p(os.path.join(conf.blddir, "modules"))57 mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, "modules"))
66 mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc'))58 mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'python/samba/dcerpc'))
67 # this allows all of the bin/shared and bin/python targets59 # this allows all of the bin/shared and bin/python targets
68 # to be expressed in terms of build directory paths60 # to be expressed in terms of build directory paths
69 mkdir_p(os.path.join(conf.blddir, 'default'))61 mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'default'))
70 for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('python', 'python_modules')]:62 for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('python', 'python')]:
71 link_target = os.path.join(conf.blddir, 'default/' + target)63 link_target = os.path.join(conf.env.BUILD_DIRECTORY, 'default/' + target)
72 if not os.path.lexists(link_target):64 if not os.path.lexists(link_target):
73 os.symlink('../' + source, link_target)65 symlink('../' + source, link_target)
7466
75 # get perl to put the blib files in the build directory67 # get perl to put the blib files in the build directory
76 blib_bld = os.path.join(conf.blddir, 'default/pidl/blib')68 blib_bld = os.path.join(conf.env.BUILD_DIRECTORY, 'default/pidl/blib')
77 blib_src = os.path.join(conf.srcdir, 'pidl/blib')69 blib_src = os.path.join(conf.srcnode.abspath(), 'pidl/blib')
78 mkdir_p(blib_bld + '/man1')70 mkdir_p(blib_bld + '/man1')
79 mkdir_p(blib_bld + '/man3')71 mkdir_p(blib_bld + '/man3')
80 if os.path.islink(blib_src):72 if os.path.islink(blib_src):
@@ -148,7 +140,7 @@ def SAMBA_LIBRARY(bld, libname, source,
148 public_headers = None140 public_headers = None
149141
150 if private_library and public_headers:142 if private_library and public_headers:
151 raise Utils.WafError("private library '%s' must not have public header files" %143 raise Errors.WafError("private library '%s' must not have public header files" %
152 libname)144 libname)
153145
154 if LIB_MUST_BE_PRIVATE(bld, libname):146 if LIB_MUST_BE_PRIVATE(bld, libname):
@@ -225,13 +217,13 @@ def SAMBA_LIBRARY(bld, libname, source,
225 # we don't want any public libraries without version numbers217 # we don't want any public libraries without version numbers
226 if (not private_library and target_type != 'PYTHON' and not realname):218 if (not private_library and target_type != 'PYTHON' and not realname):
227 if vnum is None and soname is None:219 if vnum is None and soname is None:
228 raise Utils.WafError("public library '%s' must have a vnum" %220 raise Errors.WafError("public library '%s' must have a vnum" %
229 libname)221 libname)
230 if pc_files is None:222 if pc_files is None:
231 raise Utils.WafError("public library '%s' must have pkg-config file" %223 raise Errors.WafError("public library '%s' must have pkg-config file" %
232 libname)224 libname)
233 if public_headers is None and not bld.env['IS_EXTRA_PYTHON']:225 if public_headers is None and not bld.env['IS_EXTRA_PYTHON']:
234 raise Utils.WafError("public library '%s' must have header files" %226 raise Errors.WafError("public library '%s' must have header files" %
235 libname)227 libname)
236228
237 if bundled_name is not None:229 if bundled_name is not None:
@@ -273,7 +265,7 @@ def SAMBA_LIBRARY(bld, libname, source,
273 vscript = None265 vscript = None
274 if bld.env.HAVE_LD_VERSION_SCRIPT:266 if bld.env.HAVE_LD_VERSION_SCRIPT:
275 if private_library:267 if private_library:
276 version = "%s_%s" % (Utils.g_module.APPNAME, Utils.g_module.VERSION)268 version = "%s_%s" % (Context.g_module.APPNAME, Context.g_module.VERSION)
277 elif vnum:269 elif vnum:
278 version = "%s_%s" % (libname, vnum)270 version = "%s_%s" % (libname, vnum)
279 else:271 else:
@@ -282,17 +274,17 @@ def SAMBA_LIBRARY(bld, libname, source,
282 vscript = "%s.vscript" % libname274 vscript = "%s.vscript" % libname
283 bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript,275 bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript,
284 abi_match)276 abi_match)
285 fullname = apply_pattern(bundled_name, bld.env.shlib_PATTERN)277 fullname = apply_pattern(bundled_name, bld.env.cshlib_PATTERN)
286 fullpath = bld.path.find_or_declare(fullname)278 fullpath = bld.path.find_or_declare(fullname)
287 vscriptpath = bld.path.find_or_declare(vscript)279 vscriptpath = bld.path.find_or_declare(vscript)
288 if not fullpath:280 if not fullpath:
289 raise Utils.WafError("unable to find fullpath for %s" % fullname)281 raise Errors.WafError("unable to find fullpath for %s" % fullname)
290 if not vscriptpath:282 if not vscriptpath:
291 raise Utils.WafError("unable to find vscript path for %s" % vscript)283 raise Errors.WafError("unable to find vscript path for %s" % vscript)
292 bld.add_manual_dependency(fullpath, vscriptpath)284 bld.add_manual_dependency(fullpath, vscriptpath)
293 if bld.is_install:285 if bld.is_install:
294 # also make the .inst file depend on the vscript286 # also make the .inst file depend on the vscript
295 instname = apply_pattern(bundled_name + '.inst', bld.env.shlib_PATTERN)287 instname = apply_pattern(bundled_name + '.inst', bld.env.cshlib_PATTERN)
296 bld.add_manual_dependency(bld.path.find_or_declare(instname), bld.path.find_or_declare(vscript))288 bld.add_manual_dependency(bld.path.find_or_declare(instname), bld.path.find_or_declare(vscript))
297 vscript = os.path.join(bld.path.abspath(bld.env), vscript)289 vscript = os.path.join(bld.path.abspath(bld.env), vscript)
298290
@@ -327,10 +319,12 @@ def SAMBA_LIBRARY(bld, libname, source,
327 link_name = 'shared/%s' % realname319 link_name = 'shared/%s' % realname
328320
329 if link_name:321 if link_name:
322 if 'waflib.extras.compat15' in sys.modules:
323 link_name = 'default/' + link_name
330 t.link_name = link_name324 t.link_name = link_name
331325
332 if pc_files is not None and not private_library:326 if pc_files is not None and not private_library:
333 if pyembed and bld.env['IS_EXTRA_PYTHON']:327 if pyembed:
334 bld.PKG_CONFIG_FILES(pc_files, vnum=vnum, extra_name=bld.env['PYTHON_SO_ABI_FLAG'])328 bld.PKG_CONFIG_FILES(pc_files, vnum=vnum, extra_name=bld.env['PYTHON_SO_ABI_FLAG'])
335 else:329 else:
336 bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)330 bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)
@@ -674,7 +668,7 @@ def SAMBA_GENERATOR(bld, name, rule, source='', target='',
674 target=target,668 target=target,
675 shell=isinstance(rule, str),669 shell=isinstance(rule, str),
676 update_outputs=True,670 update_outputs=True,
677 before='cc',671 before='c',
678 ext_out='.c',672 ext_out='.c',
679 samba_type='GENERATOR',673 samba_type='GENERATOR',
680 dep_vars = dep_vars,674 dep_vars = dep_vars,
@@ -728,22 +722,6 @@ Build.BuildContext.SET_BUILD_GROUP = SET_BUILD_GROUP
728722
729723
730724
731@conf
732def ENABLE_TIMESTAMP_DEPENDENCIES(conf):
733 """use timestamps instead of file contents for deps
734 this currently doesn't work"""
735 def h_file(filename):
736 import stat
737 st = os.stat(filename)
738 if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
739 m = Utils.md5()
740 m.update(str(st.st_mtime))
741 m.update(str(st.st_size))
742 m.update(filename)
743 return m.digest()
744 Utils.h_file = h_file
745
746
747def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None):725def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None):
748 '''used to copy scripts from the source tree into the build directory726 '''used to copy scripts from the source tree into the build directory
749 for use by selftest'''727 for use by selftest'''
@@ -758,14 +736,14 @@ def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None):
758 target = os.path.join(installdir, iname)736 target = os.path.join(installdir, iname)
759 tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target))737 tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target))
760 mkdir_p(tgtdir)738 mkdir_p(tgtdir)
761 link_src = os.path.normpath(os.path.join(bld.curdir, s))739 link_src = os.path.normpath(os.path.join(bld.path.abspath(), s))
762 link_dst = os.path.join(tgtdir, os.path.basename(iname))740 link_dst = os.path.join(tgtdir, os.path.basename(iname))
763 if os.path.islink(link_dst) and os.readlink(link_dst) == link_src:741 if os.path.islink(link_dst) and os.readlink(link_dst) == link_src:
764 continue742 continue
765 if os.path.exists(link_dst):743 if os.path.islink(link_dst):
766 os.unlink(link_dst)744 os.unlink(link_dst)
767 Logs.info("symlink: %s -> %s/%s" % (s, installdir, iname))745 Logs.info("symlink: %s -> %s/%s" % (s, installdir, iname))
768 os.symlink(link_src, link_dst)746 symlink(link_src, link_dst)
769Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT747Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT
770748
771749
@@ -779,10 +757,10 @@ def copy_and_fix_python_path(task):
779 replacement="""sys.path.insert(0, "%s")757 replacement="""sys.path.insert(0, "%s")
780sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"])758sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"])
781759
782 if task.env["PYTHON"][0] == "/":760 if task.env["PYTHON"][0].startswith("/"):
783 replacement_shebang = "#!%s\n" % task.env["PYTHON"]761 replacement_shebang = "#!%s\n" % task.env["PYTHON"][0]
784 else:762 else:
785 replacement_shebang = "#!/usr/bin/env %s\n" % task.env["PYTHON"]763 replacement_shebang = "#!/usr/bin/env %s\n" % task.env["PYTHON"][0]
786764
787 installed_location=task.outputs[0].bldpath(task.env)765 installed_location=task.outputs[0].bldpath(task.env)
788 source_file = open(task.inputs[0].srcpath(task.env))766 source_file = open(task.inputs[0].srcpath(task.env))
@@ -790,7 +768,7 @@ sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"])
790 lineno = 0768 lineno = 0
791 for line in source_file:769 for line in source_file:
792 newline = line770 newline = line
793 if (lineno == 0 and task.env["PYTHON_SPECIFIED"] is True and771 if (lineno == 0 and
794 line[:2] == "#!"):772 line[:2] == "#!"):
795 newline = replacement_shebang773 newline = replacement_shebang
796 elif pattern in line:774 elif pattern in line:
@@ -798,7 +776,7 @@ sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"])
798 installed_file.write(newline)776 installed_file.write(newline)
799 lineno = lineno + 1777 lineno = lineno + 1
800 installed_file.close()778 installed_file.close()
801 os.chmod(installed_location, 0755)779 os.chmod(installed_location, 0o755)
802 return 0780 return 0
803781
804def copy_and_fix_perl_path(task):782def copy_and_fix_perl_path(task):
@@ -826,7 +804,7 @@ def copy_and_fix_perl_path(task):
826 installed_file.write(newline)804 installed_file.write(newline)
827 lineno = lineno + 1805 lineno = lineno + 1
828 installed_file.close()806 installed_file.close()
829 os.chmod(installed_location, 0755)807 os.chmod(installed_location, 0o755)
830 return 0808 return 0
831809
832810
@@ -834,6 +812,8 @@ def install_file(bld, destdir, file, chmod=MODE_644, flat=False,
834 python_fixup=False, perl_fixup=False,812 python_fixup=False, perl_fixup=False,
835 destname=None, base_name=None):813 destname=None, base_name=None):
836 '''install a file'''814 '''install a file'''
815 if not isinstance(file, str):
816 file = file.abspath()
837 destdir = bld.EXPAND_VARIABLES(destdir)817 destdir = bld.EXPAND_VARIABLES(destdir)
838 if not destname:818 if not destname:
839 destname = file819 destname = file
@@ -898,16 +878,19 @@ def INSTALL_DIR(bld, path, chmod=0o755, env=None):
898 if not path:878 if not path:
899 return []879 return []
900880
901 destpath = bld.get_install_path(path, env)881 destpath = bld.EXPAND_VARIABLES(path)
882 if Options.options.destdir:
883 destpath = os.path.join(Options.options.destdir, destpath.lstrip(os.sep))
902884
903 if bld.is_install > 0:885 if bld.is_install > 0:
904 if not os.path.isdir(destpath):886 if not os.path.isdir(destpath):
905 try:887 try:
888 Logs.info('* create %s', destpath)
906 os.makedirs(destpath)889 os.makedirs(destpath)
907 os.chmod(destpath, chmod)890 os.chmod(destpath, chmod)
908 except OSError as e:891 except OSError as e:
909 if not os.path.isdir(destpath):892 if not os.path.isdir(destpath):
910 raise Utils.WafError("Cannot create the folder '%s' (error: %s)" % (path, e))893 raise Errors.WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
911Build.BuildContext.INSTALL_DIR = INSTALL_DIR894Build.BuildContext.INSTALL_DIR = INSTALL_DIR
912895
913def INSTALL_DIRS(bld, destdir, dirs, chmod=0o755, env=None):896def INSTALL_DIRS(bld, destdir, dirs, chmod=0o755, env=None):
@@ -938,7 +921,7 @@ def SAMBAMANPAGES(bld, manpages, extra_source=None):
938 '''build and install manual pages'''921 '''build and install manual pages'''
939 bld.env.SAMBA_EXPAND_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/expand-sambadoc.xsl'922 bld.env.SAMBA_EXPAND_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/expand-sambadoc.xsl'
940 bld.env.SAMBA_MAN_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/man.xsl'923 bld.env.SAMBA_MAN_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/man.xsl'
941 bld.env.SAMBA_CATALOG = bld.srcnode.abspath() + '/bin/default/docs-xml/build/catalog.xml'924 bld.env.SAMBA_CATALOG = bld.bldnode.abspath() + '/docs-xml/build/catalog.xml'
942 bld.env.SAMBA_CATALOGS = 'file:///etc/xml/catalog file:///usr/local/share/xml/catalog file://' + bld.env.SAMBA_CATALOG925 bld.env.SAMBA_CATALOGS = 'file:///etc/xml/catalog file:///usr/local/share/xml/catalog file://' + bld.env.SAMBA_CATALOG
943926
944 for m in manpages.split():927 for m in manpages.split():
@@ -958,59 +941,6 @@ def SAMBAMANPAGES(bld, manpages, extra_source=None):
958 bld.INSTALL_FILES('${MANDIR}/man%s' % m[-1], m, flat=True)941 bld.INSTALL_FILES('${MANDIR}/man%s' % m[-1], m, flat=True)
959Build.BuildContext.SAMBAMANPAGES = SAMBAMANPAGES942Build.BuildContext.SAMBAMANPAGES = SAMBAMANPAGES
960943
961#############################################################
962# give a nicer display when building different types of files
963def progress_display(self, msg, fname):
964 col1 = Logs.colors(self.color)
965 col2 = Logs.colors.NORMAL
966 total = self.position[1]
967 n = len(str(total))
968 fs = '[%%%dd/%%%dd] %s %%s%%s%%s\n' % (n, n, msg)
969 return fs % (self.position[0], self.position[1], col1, fname, col2)
970
971def link_display(self):
972 if Options.options.progress_bar != 0:
973 return Task.Task.old_display(self)
974 fname = self.outputs[0].bldpath(self.env)
975 return progress_display(self, 'Linking', fname)
976Task.TaskBase.classes['cc_link'].display = link_display
977
978def samba_display(self):
979 if Options.options.progress_bar != 0:
980 return Task.Task.old_display(self)
981
982 targets = LOCAL_CACHE(self, 'TARGET_TYPE')
983 if self.name in targets:
984 target_type = targets[self.name]
985 type_map = { 'GENERATOR' : 'Generating',
986 'PROTOTYPE' : 'Generating'
987 }
988 if target_type in type_map:
989 return progress_display(self, type_map[target_type], self.name)
990
991 if len(self.inputs) == 0:
992 return Task.Task.old_display(self)
993
994 fname = self.inputs[0].bldpath(self.env)
995 if fname[0:3] == '../':
996 fname = fname[3:]
997 ext_loc = fname.rfind('.')
998 if ext_loc == -1:
999 return Task.Task.old_display(self)
1000 ext = fname[ext_loc:]
1001
1002 ext_map = { '.idl' : 'Compiling IDL',
1003 '.et' : 'Compiling ERRTABLE',
1004 '.asn1': 'Compiling ASN1',
1005 '.c' : 'Compiling' }
1006 if ext in ext_map:
1007 return progress_display(self, ext_map[ext], fname)
1008 return Task.Task.old_display(self)
1009
1010Task.TaskBase.classes['Task'].old_display = Task.TaskBase.classes['Task'].display
1011Task.TaskBase.classes['Task'].display = samba_display
1012
1013
1014@after('apply_link')944@after('apply_link')
1015@feature('cshlib')945@feature('cshlib')
1016def apply_bundle_remove_dynamiclib_patch(self):946def apply_bundle_remove_dynamiclib_patch(self):
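
wafsamba now insists on the bundled waf: Context.HEXVERSION must be exactly 0x2000800, i.e. waf 2.0.8, replacing the old minimum-version check against Constants.HEXVERSION, and the waf 1.5-only progress display and timestamp-dependency hooks are dropped. A quick sketch of decoding that constant, assuming waf's usual 0xMMmmpp00 packing (illustrative only):

    def decode_hexversion(hexversion):
        """Split a waf HEXVERSION into (major, minor, patch)."""
        return ((hexversion >> 24) & 0xff,
                (hexversion >> 16) & 0xff,
                (hexversion >> 8) & 0xff)

    print(decode_hexversion(0x2000800))   # (2, 0, 8) -> waf 2.0.8
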
diff --git a/buildtools/wafsamba/wscript b/buildtools/wafsamba/wscript
index 0eef330..ab19859 100644
--- a/buildtools/wafsamba/wscript
+++ b/buildtools/wafsamba/wscript
@@ -3,7 +3,8 @@
3# this is a base set of waf rules that everything else pulls in first3# this is a base set of waf rules that everything else pulls in first
44
5import os, sys5import os, sys
6import wafsamba, Configure, Logs, Options, Utils6from waflib import Configure, Logs, Options, Utils, Context, Errors
7import wafsamba
7from samba_utils import os_path_relpath8from samba_utils import os_path_relpath
8from optparse import SUPPRESS_HELP9from optparse import SUPPRESS_HELP
910
@@ -14,12 +15,17 @@ from optparse import SUPPRESS_HELP
14# are resolved related to WAFCACHE. It will need a lot of testing15# are resolved related to WAFCACHE. It will need a lot of testing
15# before it is enabled by default.16# before it is enabled by default.
16if '--enable-auto-reconfigure' in sys.argv:17if '--enable-auto-reconfigure' in sys.argv:
17 Configure.autoconfig = True18 Configure.autoconfig = 'clobber'
1819
19def set_options(opt):20def default_value(option, default=''):
20 opt.tool_options('compiler_cc')21 if option in Options.options.__dict__:
22 return Options.options.__dict__[option]
23 return default
2124
22 opt.tool_options('gnu_dirs')25def options(opt):
26 opt.load('compiler_cc')
27
28 opt.load('gnu_dirs')
2329
24 gr = opt.option_group('library handling options')30 gr = opt.option_group('library handling options')
2531
@@ -31,17 +37,17 @@ def set_options(opt):
31 help=("comma separated list of normally public libraries to build instead as private libraries. May include !LIBNAME to disable making a library private. Can be 'NONE' or 'ALL' [auto]"),37 help=("comma separated list of normally public libraries to build instead as private libraries. May include !LIBNAME to disable making a library private. Can be 'NONE' or 'ALL' [auto]"),
32 action="store", dest='PRIVATE_LIBS', default='')38 action="store", dest='PRIVATE_LIBS', default='')
3339
34 extension_default = Options.options['PRIVATE_EXTENSION_DEFAULT']40 extension_default = default_value('PRIVATE_EXTENSION_DEFAULT')
35 gr.add_option('--private-library-extension',41 gr.add_option('--private-library-extension',
36 help=("name extension for private libraries [%s]" % extension_default),42 help=("name extension for private libraries [%s]" % extension_default),
37 action="store", dest='PRIVATE_EXTENSION', default=extension_default)43 action="store", dest='PRIVATE_EXTENSION', default=extension_default)
3844
39 extension_exception = Options.options['PRIVATE_EXTENSION_EXCEPTION']45 extension_exception = default_value('PRIVATE_EXTENSION_EXCEPTION')
40 gr.add_option('--private-extension-exception',46 gr.add_option('--private-extension-exception',
41 help=("comma separated list of libraries to not apply extension to [%s]" % extension_exception),47 help=("comma separated list of libraries to not apply extension to [%s]" % extension_exception),
42 action="store", dest='PRIVATE_EXTENSION_EXCEPTION', default=extension_exception)48 action="store", dest='PRIVATE_EXTENSION_EXCEPTION', default=extension_exception)
4349
44 builtin_default = Options.options['BUILTIN_LIBRARIES_DEFAULT']50 builtin_default = default_value('BUILTIN_LIBRARIES_DEFAULT')
45 gr.add_option('--builtin-libraries',51 gr.add_option('--builtin-libraries',
46 help=("command separated list of libraries to build directly into binaries [%s]" % builtin_default),52 help=("command separated list of libraries to build directly into binaries [%s]" % builtin_default),
47 action="store", dest='BUILTIN_LIBRARIES', default=builtin_default)53 action="store", dest='BUILTIN_LIBRARIES', default=builtin_default)
@@ -71,7 +77,7 @@ def set_options(opt):
71 action="store", dest='MODULESDIR', default='${PREFIX}/modules')77 action="store", dest='MODULESDIR', default='${PREFIX}/modules')
7278
73 opt.add_option('--with-privatelibdir',79 opt.add_option('--with-privatelibdir',
74 help=("private library directory [PREFIX/lib/%s]" % Utils.g_module.APPNAME),80 help=("private library directory [PREFIX/lib/%s]" % Context.g_module.APPNAME),
75 action="store", dest='PRIVATELIBDIR', default=None)81 action="store", dest='PRIVATELIBDIR', default=None)
7682
77 opt.add_option('--with-libiconv',83 opt.add_option('--with-libiconv',
@@ -109,9 +115,6 @@ def set_options(opt):
109 gr.add_option('--enable-gccdeps',115 gr.add_option('--enable-gccdeps',
110 help=("Enable use of gcc -MD dependency module"),116 help=("Enable use of gcc -MD dependency module"),
111 action="store_true", dest='enable_gccdeps', default=True)117 action="store_true", dest='enable_gccdeps', default=True)
112 gr.add_option('--timestamp-dependencies',
113 help=("use file timestamps instead of content for build dependencies (BROKEN)"),
114 action="store_true", dest='timestamp_dependencies', default=False)
115 gr.add_option('--pedantic',118 gr.add_option('--pedantic',
116 help=("Enable even more compiler warnings"),119 help=("Enable even more compiler warnings"),
117 action='store_true', dest='pedantic', default=False)120 action='store_true', dest='pedantic', default=False)
@@ -210,23 +213,19 @@ def set_options(opt):
210@Utils.run_once213@Utils.run_once
211def configure(conf):214def configure(conf):
212 conf.env.hlist = []215 conf.env.hlist = []
213 conf.env.srcdir = conf.srcdir216 conf.env.srcdir = conf.srcnode.abspath()
214217
215 conf.define('SRCDIR', conf.env['srcdir'])218 conf.define('SRCDIR', conf.env['srcdir'])
216219
217 if Options.options.timestamp_dependencies:
218 conf.ENABLE_TIMESTAMP_DEPENDENCIES()
219
220 conf.SETUP_CONFIGURE_CACHE(Options.options.enable_configure_cache)220 conf.SETUP_CONFIGURE_CACHE(Options.options.enable_configure_cache)
221221
222 # load our local waf extensions222 # load our local waf extensions
223 conf.check_tool('gnu_dirs')223 conf.load('gnu_dirs')
224 conf.check_tool('wafsamba')224 conf.load('wafsamba')
225 conf.check_tool('print_commands')
226225
227 conf.CHECK_CC_ENV()226 conf.CHECK_CC_ENV()
228227
229 conf.check_tool('compiler_cc')228 conf.load('compiler_c')
230229
231 conf.CHECK_STANDARD_LIBPATH()230 conf.CHECK_STANDARD_LIBPATH()
232231
@@ -236,31 +235,10 @@ def configure(conf):
236 # older gcc versions (< 4.4) does not work with gccdeps, so we have to see if the .d file is generated235 # older gcc versions (< 4.4) does not work with gccdeps, so we have to see if the .d file is generated
237 if Options.options.enable_gccdeps:236 if Options.options.enable_gccdeps:
238 # stale file removal - the configuration may pick up the old .pyc file237 # stale file removal - the configuration may pick up the old .pyc file
239 p = os.path.join(conf.srcdir, 'buildtools/wafsamba/gccdeps.pyc')238 p = os.path.join(conf.env.srcdir, 'buildtools/wafsamba/gccdeps.pyc')
240 if os.path.exists(p):239 if os.path.exists(p):
241 os.remove(p)240 os.remove(p)
242241 conf.load('gccdeps')
243 from TaskGen import feature, after
244 @feature('testd')
245 @after('apply_core')
246 def check_d(self):
247 tsk = self.compiled_tasks[0]
248 tsk.outputs.append(tsk.outputs[0].change_ext('.d'))
249
250 import Task
251 cc = Task.TaskBase.classes['cc']
252 oldmeth = cc.run
253
254 cc.run = Task.compile_fun_noshell('cc', '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath(env)}')[0]
255 try:
256 try:
257 conf.check(features='c testd', fragment='int main() {return 0;}\n', ccflags=['-MD'], mandatory=True, msg='Check for -MD')
258 except:
259 pass
260 else:
261 conf.check_tool('gccdeps', tooldir=conf.srcdir + "/buildtools/wafsamba")
262 finally:
263 cc.run = oldmeth
264242
265 # make the install paths available in environment243 # make the install paths available in environment
266 conf.env.LIBDIR = Options.options.LIBDIR or '${PREFIX}/lib'244 conf.env.LIBDIR = Options.options.LIBDIR or '${PREFIX}/lib'
@@ -330,15 +308,16 @@ def configure(conf):
330 "-qhalt=w", # IBM xlc308 "-qhalt=w", # IBM xlc
331 "-w2", # Tru64309 "-w2", # Tru64
332 ]:310 ]:
333 if conf.CHECK_CFLAGS([f], '''311 if conf.CHECK_CFLAGS([f]):
334'''):
335 if not 'WERROR_CFLAGS' in conf.env:312 if not 'WERROR_CFLAGS' in conf.env:
336 conf.env['WERROR_CFLAGS'] = []313 conf.env['WERROR_CFLAGS'] = []
337 conf.env['WERROR_CFLAGS'].extend([f])314 conf.env['WERROR_CFLAGS'].extend([f])
338 break315 break
339316
340 # check which compiler/linker flags are needed for rpath support317 # check which compiler/linker flags are needed for rpath support
341 if not conf.CHECK_LDFLAGS(['-Wl,-rpath,.']) and conf.CHECK_LDFLAGS(['-Wl,-R,.']):318 if conf.CHECK_LDFLAGS(['-Wl,-rpath,.']):
319 conf.env['RPATH_ST'] = '-Wl,-rpath,%s'
320 elif conf.CHECK_LDFLAGS(['-Wl,-R,.']):
342 conf.env['RPATH_ST'] = '-Wl,-R,%s'321 conf.env['RPATH_ST'] = '-Wl,-R,%s'
343322
344 # check for rpath323 # check for rpath
@@ -348,7 +327,7 @@ def configure(conf):
348 conf.env.RPATH_ON_INSTALL = (conf.env.RPATH_ON_BUILD and327 conf.env.RPATH_ON_INSTALL = (conf.env.RPATH_ON_BUILD and
349 not Options.options.disable_rpath_install)328 not Options.options.disable_rpath_install)
350 if not conf.env.PRIVATELIBDIR:329 if not conf.env.PRIVATELIBDIR:
351 conf.env.PRIVATELIBDIR = '%s/%s' % (conf.env.LIBDIR, Utils.g_module.APPNAME)330 conf.env.PRIVATELIBDIR = '%s/%s' % (conf.env.LIBDIR, Context.g_module.APPNAME)
352 conf.env.RPATH_ON_INSTALL_PRIVATE = (331 conf.env.RPATH_ON_INSTALL_PRIVATE = (
353 not Options.options.disable_rpath_private_install)332 not Options.options.disable_rpath_private_install)
354 else:333 else:
@@ -370,10 +349,10 @@ def configure(conf):
370 else:349 else:
371 conf.env.HAVE_LD_VERSION_SCRIPT = False350 conf.env.HAVE_LD_VERSION_SCRIPT = False
372351
373 if conf.CHECK_CFLAGS(['-fvisibility=hidden'] + conf.env.WERROR_CFLAGS):352 if conf.CHECK_CFLAGS(['-fvisibility=hidden']):
374 conf.env.VISIBILITY_CFLAGS = '-fvisibility=hidden'353 conf.env.VISIBILITY_CFLAGS = '-fvisibility=hidden'
375 conf.CHECK_CODE('''int main(void) { return 0; }354 conf.CHECK_CODE('''int main(void) { return 0; }
376 __attribute__((visibility("default"))) void vis_foo2(void) {}''',355 __attribute__((visibility("default"))) void vis_foo2(void) {}\n''',
377 cflags=conf.env.VISIBILITY_CFLAGS,356 cflags=conf.env.VISIBILITY_CFLAGS,
378 strict=True,357 strict=True,
379 define='HAVE_VISIBILITY_ATTR', addmain=False)358 define='HAVE_VISIBILITY_ATTR', addmain=False)
@@ -466,6 +445,15 @@ def configure(conf):
466 conf.DEFINE('_GNU_SOURCE', 1, add_to_cflags=True)445 conf.DEFINE('_GNU_SOURCE', 1, add_to_cflags=True)
467 conf.DEFINE('_XOPEN_SOURCE_EXTENDED', 1, add_to_cflags=True)446 conf.DEFINE('_XOPEN_SOURCE_EXTENDED', 1, add_to_cflags=True)
468447
448 #
449 # Needs to be defined before std*.h and string*.h are included
450 # As Python.h already brings string.h we need it in CFLAGS.
451 # See memset_s() details here:
452 # https://en.cppreference.com/w/c/string/byte/memset
453 #
454 if conf.CHECK_CFLAGS(['-D__STDC_WANT_LIB_EXT1__=1']):
455 conf.ADD_CFLAGS('-D__STDC_WANT_LIB_EXT1__=1')
456
469 # on Tru64 certain features are only available with _OSF_SOURCE set to 1457 # on Tru64 certain features are only available with _OSF_SOURCE set to 1
470 # and _XOPEN_SOURCE set to 600458 # and _XOPEN_SOURCE set to 600
471 if conf.env['SYSTEM_UNAME_SYSNAME'] == 'OSF1':459 if conf.env['SYSTEM_UNAME_SYSNAME'] == 'OSF1':
@@ -501,7 +489,7 @@ struct foo bar = { .y = 'X', .x = 1 };
501489
502 # see if we need special largefile flags490 # see if we need special largefile flags
503 if not conf.CHECK_LARGEFILE():491 if not conf.CHECK_LARGEFILE():
504 raise Utils.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8')492 raise Errors.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8')
505493
506 if conf.env.HAVE_STDDEF_H and conf.env.HAVE_STDLIB_H:494 if conf.env.HAVE_STDDEF_H and conf.env.HAVE_STDLIB_H:
507 conf.DEFINE('STDC_HEADERS', 1)495 conf.DEFINE('STDC_HEADERS', 1)
@@ -512,7 +500,7 @@ struct foo bar = { .y = 'X', .x = 1 };
512 conf.DEFINE('TIME_WITH_SYS_TIME', 1)500 conf.DEFINE('TIME_WITH_SYS_TIME', 1)
513501
514 # cope with different extensions for libraries502 # cope with different extensions for libraries
515 (root, ext) = os.path.splitext(conf.env.shlib_PATTERN)503 (root, ext) = os.path.splitext(conf.env.cshlib_PATTERN)
516 if ext[0] == '.':504 if ext[0] == '.':
517 conf.define('SHLIBEXT', ext[1:], quote=True)505 conf.define('SHLIBEXT', ext[1:], quote=True)
518 else:506 else:
@@ -534,7 +522,7 @@ struct foo bar = { .y = 'X', .x = 1 };
534 #if !defined(LITTLE) || !defined(B) || LITTLE != B522 #if !defined(LITTLE) || !defined(B) || LITTLE != B
535 #error Not little endian.523 #error Not little endian.
536 #endif524 #endif
537 int main(void) { return 0; }""",525 int main(void) { return 0; }\n""",
538 addmain=False,526 addmain=False,
539 headers="endian.h sys/endian.h",527 headers="endian.h sys/endian.h",
540 define="HAVE_LITTLE_ENDIAN")528 define="HAVE_LITTLE_ENDIAN")
@@ -553,7 +541,7 @@ struct foo bar = { .y = 'X', .x = 1 };
553 #if !defined(BIG) || !defined(B) || BIG != B541 #if !defined(BIG) || !defined(B) || BIG != B
554 #error Not big endian.542 #error Not big endian.
555 #endif543 #endif
556 int main(void) { return 0; }""",544 int main(void) { return 0; }\n""",
557 addmain=False,545 addmain=False,
558 headers="endian.h sys/endian.h",546 headers="endian.h sys/endian.h",
559 define="HAVE_BIG_ENDIAN")547 define="HAVE_BIG_ENDIAN")
@@ -576,7 +564,7 @@ struct foo bar = { .y = 'X', .x = 1 };
576 # Extra sanity check.564 # Extra sanity check.
577 if conf.CONFIG_SET("HAVE_BIG_ENDIAN") == conf.CONFIG_SET("HAVE_LITTLE_ENDIAN"):565 if conf.CONFIG_SET("HAVE_BIG_ENDIAN") == conf.CONFIG_SET("HAVE_LITTLE_ENDIAN"):
578 Logs.error("Failed endian determination. The PDP-11 is back?")566 Logs.error("Failed endian determination. The PDP-11 is back?")
579 sys.exit(1)567 sys.exit(1)
580 else:568 else:
581 if conf.CONFIG_SET("HAVE_BIG_ENDIAN"):569 if conf.CONFIG_SET("HAVE_BIG_ENDIAN"):
582 conf.DEFINE('WORDS_BIGENDIAN', 1)570 conf.DEFINE('WORDS_BIGENDIAN', 1)
@@ -607,12 +595,13 @@ struct foo bar = { .y = 'X', .x = 1 };
607595
608def build(bld):596def build(bld):
609 # give a more useful message if the source directory has moved597 # give a more useful message if the source directory has moved
610 relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath())598 curdir = bld.path.abspath()
599 srcdir = bld.srcnode.abspath()
600 relpath = os_path_relpath(curdir, srcdir)
611 if relpath.find('../') != -1:601 if relpath.find('../') != -1:
612 Logs.error('bld.curdir %s is not a child of %s' % (bld.curdir, bld.srcnode.abspath()))602 Logs.error('bld.path %s is not a child of %s' % (curdir, srcdir))
613 raise Utils.WafError('''The top source directory has moved. Please run distclean and reconfigure''')603 raise Errors.WafError('''The top source directory has moved. Please run distclean and reconfigure''')
614604
615 bld.CHECK_MAKEFLAGS()
616 bld.SETUP_BUILD_GROUPS()605 bld.SETUP_BUILD_GROUPS()
617 bld.ENFORCE_GROUP_ORDERING()606 bld.ENFORCE_GROUP_ORDERING()
618 bld.CHECK_PROJECT_RULES()607 bld.CHECK_PROJECT_RULES()
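
Among the wscript changes, the rpath probe becomes two explicit branches: if the linker accepts -Wl,-rpath,. that template is recorded, otherwise -Wl,-R,. is tried; the old single expression only ever set RPATH_ST for the -R fallback. A compact sketch of the corrected selection, where check stands in for conf.CHECK_LDFLAGS (it takes a flag list and returns True when a test link succeeds):

    def pick_rpath_template(check):
        """Return the rpath flag template supported by this linker, or None."""
        if check(['-Wl,-rpath,.']):
            return '-Wl,-rpath,%s'
        if check(['-Wl,-R,.']):
            return '-Wl,-R,%s'
        return None

    # usage with a fake probe that only understands -R
    print(pick_rpath_template(lambda flags: flags == ['-Wl,-R,.']))   # -Wl,-R,%s
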
diff --git a/common/dump.c b/common/dump.c
index 73286b8..adcf591 100644
--- a/common/dump.c
+++ b/common/dump.c
@@ -60,6 +60,7 @@ static tdb_off_t tdb_dump_record(struct tdb_context *tdb, int hash,
6060
61static int tdb_dump_chain(struct tdb_context *tdb, int i)61static int tdb_dump_chain(struct tdb_context *tdb, int i)
62{62{
63 struct tdb_chainwalk_ctx chainwalk;
63 tdb_off_t rec_ptr, top;64 tdb_off_t rec_ptr, top;
6465
65 if (i == -1) {66 if (i == -1) {
@@ -74,11 +75,19 @@ static int tdb_dump_chain(struct tdb_context *tdb, int i)
74 if (tdb_ofs_read(tdb, top, &rec_ptr) == -1)75 if (tdb_ofs_read(tdb, top, &rec_ptr) == -1)
75 return tdb_unlock(tdb, i, F_WRLCK);76 return tdb_unlock(tdb, i, F_WRLCK);
7677
78 tdb_chainwalk_init(&chainwalk, rec_ptr);
79
77 if (rec_ptr)80 if (rec_ptr)
78 printf("hash=%d\n", i);81 printf("hash=%d\n", i);
7982
80 while (rec_ptr) {83 while (rec_ptr) {
84 bool ok;
81 rec_ptr = tdb_dump_record(tdb, i, rec_ptr);85 rec_ptr = tdb_dump_record(tdb, i, rec_ptr);
86 ok = tdb_chainwalk_check(tdb, &chainwalk, rec_ptr);
87 if (!ok) {
88 printf("circular hash chain %d\n", i);
89 break;
90 }
82 }91 }
8392
84 return tdb_unlock(tdb, i, F_WRLCK);93 return tdb_unlock(tdb, i, F_WRLCK);
@@ -86,7 +95,7 @@ static int tdb_dump_chain(struct tdb_context *tdb, int i)
8695
87_PUBLIC_ void tdb_dump_all(struct tdb_context *tdb)96_PUBLIC_ void tdb_dump_all(struct tdb_context *tdb)
88{97{
89 int i;98 uint32_t i;
90 for (i=0;i<tdb->hash_size;i++) {99 for (i=0;i<tdb->hash_size;i++) {
91 tdb_dump_chain(tdb, i);100 tdb_dump_chain(tdb, i);
92 }101 }
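
tdb_dump_chain now carries a tdb_chainwalk_ctx so that a corrupted, circular hash chain ends with a "circular hash chain" message instead of looping forever. A minimal Python sketch of the general idea using Floyd-style slow/fast pointers; the real helper is tdb_chainwalk_init/tdb_chainwalk_check and its exact bookkeeping lives elsewhere in the tdb sources, so treat this only as an illustration of the technique:

    def walk_chain(start, next_of):
        """Visit a linked chain of offsets, raising on a cycle.

        next_of(off) returns the next offset, with 0 terminating the chain.
        """
        visited = []
        slow = fast = start
        while fast:
            visited.append(fast)
            fast = next_of(fast)
            if not fast:
                break
            visited.append(fast)
            fast = next_of(fast)          # fast advances two steps per loop
            slow = next_of(slow)          # slow advances one
            if fast and fast == slow:
                raise RuntimeError("circular hash chain")
        return visited

    # usage: a healthy chain 10 -> 20 -> 30 -> end
    chain = {10: 20, 20: 30, 30: 0}
    print(walk_chain(10, chain.get))      # [10, 20, 30]
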
diff --git a/common/freelist.c b/common/freelist.c
index 86fac2f..37a4c16 100644
--- a/common/freelist.c
+++ b/common/freelist.c
@@ -27,12 +27,6 @@
2727
28#include "tdb_private.h"28#include "tdb_private.h"
2929
30/* 'right' merges can involve O(n^2) cost when combined with a
31 traverse, so they are disabled until we find a way to do them in
32 O(1) time
33*/
34#define USE_RIGHT_MERGES 0
35
36/* read a freelist record and check for simple errors */30/* read a freelist record and check for simple errors */
37int tdb_rec_free_read(struct tdb_context *tdb, tdb_off_t off, struct tdb_record *rec)31int tdb_rec_free_read(struct tdb_context *tdb, tdb_off_t off, struct tdb_record *rec)
38{32{
@@ -61,30 +55,6 @@ int tdb_rec_free_read(struct tdb_context *tdb, tdb_off_t off, struct tdb_record
61 return 0;55 return 0;
62}56}
6357
64
65#if USE_RIGHT_MERGES
66/* Remove an element from the freelist. Must have alloc lock. */
67static int remove_from_freelist(struct tdb_context *tdb, tdb_off_t off, tdb_off_t next)
68{
69 tdb_off_t last_ptr, i;
70
71 /* read in the freelist top */
72 last_ptr = FREELIST_TOP;
73 while (tdb_ofs_read(tdb, last_ptr, &i) != -1 && i != 0) {
74 if (i == off) {
75 /* We've found it! */
76 return tdb_ofs_write(tdb, last_ptr, &next);
77 }
78 /* Follow chain (next offset is at start of record) */
79 last_ptr = i;
80 }
81 tdb->ecode = TDB_ERR_CORRUPT;
82 TDB_LOG((tdb, TDB_DEBUG_FATAL,"remove_from_freelist: not on list at off=%u\n", off));
83 return -1;
84}
85#endif
86
87
88/* update a record tailer (must hold allocation lock) */58/* update a record tailer (must hold allocation lock) */
89static int update_tailer(struct tdb_context *tdb, tdb_off_t offset,59static int update_tailer(struct tdb_context *tdb, tdb_off_t offset,
90 const struct tdb_record *rec)60 const struct tdb_record *rec)
@@ -199,7 +169,7 @@ static int merge_with_left_record(struct tdb_context *tdb,
199 * 0 if left was not a free record169 * 0 if left was not a free record
200 * 1 if left was free and successfully merged.170 * 1 if left was free and successfully merged.
201 *171 *
202 * The currend record is handed in with pointer and fully read record.172 * The current record is handed in with pointer and fully read record.
203 *173 *
204 * The left record pointer and struct can be retrieved as result174 * The left record pointer and struct can be retrieved as result
205 * in lp and lr;175 * in lp and lr;
@@ -318,33 +288,6 @@ int tdb_free(struct tdb_context *tdb, tdb_off_t offset, struct tdb_record *rec)
318 goto fail;288 goto fail;
319 }289 }
320290
321#if USE_RIGHT_MERGES
322 /* Look right first (I'm an Australian, dammit) */
323 if (offset + sizeof(*rec) + rec->rec_len + sizeof(*rec) <= tdb->map_size) {
324 tdb_off_t right = offset + sizeof(*rec) + rec->rec_len;
325 struct tdb_record r;
326
327 if (tdb->methods->tdb_read(tdb, right, &r, sizeof(r), DOCONV()) == -1) {
328 TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: right read failed at %u\n", right));
329 goto left;
330 }
331
332 /* If it's free, expand to include it. */
333 if (r.magic == TDB_FREE_MAGIC) {
334 if (remove_from_freelist(tdb, right, r.next) == -1) {
335 TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: right free failed at %u\n", right));
336 goto left;
337 }
338 rec->rec_len += sizeof(r) + r.rec_len;
339 if (update_tailer(tdb, offset, rec) == -1) {
340 TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: update_tailer failed at %u\n", offset));
341 goto fail;
342 }
343 }
344 }
345left:
346#endif
347
348 ret = check_merge_with_left_record(tdb, offset, rec, NULL, NULL);291 ret = check_merge_with_left_record(tdb, offset, rec, NULL, NULL);
349 if (ret == -1) {292 if (ret == -1) {
350 goto fail;293 goto fail;
@@ -444,6 +387,8 @@ static tdb_off_t tdb_allocate_from_freelist(
444 struct tdb_context *tdb, tdb_len_t length, struct tdb_record *rec)387 struct tdb_context *tdb, tdb_len_t length, struct tdb_record *rec)
445{388{
446 tdb_off_t rec_ptr, last_ptr, newrec_ptr;389 tdb_off_t rec_ptr, last_ptr, newrec_ptr;
390 struct tdb_chainwalk_ctx chainwalk;
391 bool modified;
447 struct {392 struct {
448 tdb_off_t rec_ptr, last_ptr;393 tdb_off_t rec_ptr, last_ptr;
449 tdb_len_t rec_len;394 tdb_len_t rec_len;
@@ -466,6 +411,9 @@ static tdb_off_t tdb_allocate_from_freelist(
466 if (tdb_ofs_read(tdb, FREELIST_TOP, &rec_ptr) == -1)411 if (tdb_ofs_read(tdb, FREELIST_TOP, &rec_ptr) == -1)
467 return 0;412 return 0;
468413
414 modified = false;
415 tdb_chainwalk_init(&chainwalk, rec_ptr);
416
469 bestfit.rec_ptr = 0;417 bestfit.rec_ptr = 0;
470 bestfit.last_ptr = 0;418 bestfit.last_ptr = 0;
471 bestfit.rec_len = 0;419 bestfit.rec_len = 0;
@@ -526,6 +474,8 @@ static tdb_off_t tdb_allocate_from_freelist(
526 merge_created_candidate = true;474 merge_created_candidate = true;
527 }475 }
528476
477 modified = true;
478
529 continue;479 continue;
530 }480 }
531481
@@ -542,6 +492,14 @@ static tdb_off_t tdb_allocate_from_freelist(
542 last_ptr = rec_ptr;492 last_ptr = rec_ptr;
543 rec_ptr = rec->next;493 rec_ptr = rec->next;
544494
495 if (!modified) {
496 bool ok;
497 ok = tdb_chainwalk_check(tdb, &chainwalk, rec_ptr);
498 if (!ok) {
499 return 0;
500 }
501 }
502
545 /* if we've found a record that is big enough, then503 /* if we've found a record that is big enough, then
546 stop searching if its also not too big. The504 stop searching if its also not too big. The
547 definition of 'too big' changes as we scan505 definition of 'too big' changes as we scan
@@ -597,6 +555,17 @@ static bool tdb_alloc_dead(
597 return (tdb_ofs_write(tdb, last_ptr, &rec->next) == 0);555 return (tdb_ofs_write(tdb, last_ptr, &rec->next) == 0);
598}556}
599557
558static void tdb_purge_dead(struct tdb_context *tdb, uint32_t hash)
559{
560 int max_dead_records = tdb->max_dead_records;
561
562 tdb->max_dead_records = 0;
563
564 tdb_trim_dead(tdb, hash);
565
566 tdb->max_dead_records = max_dead_records;
567}
568
600/*569/*
601 * Chain "hash" is assumed to be locked570 * Chain "hash" is assumed to be locked
602 */571 */
@@ -605,7 +574,7 @@ tdb_off_t tdb_allocate(struct tdb_context *tdb, int hash, tdb_len_t length,
605 574 struct tdb_record *rec)
606 575 {
607 576 tdb_off_t ret;
608 int i;
577 uint32_t i;
609 578
610 579 if (tdb->max_dead_records == 0) {
611 580 /*
@@ -661,6 +630,12 @@ blocking_freelist_allocate:
661 630 if (tdb_lock(tdb, -1, F_WRLCK) == -1) {
662 631 return 0;
663 632 }
633 /*
634 * Dead records can happen even if max_dead_records==0, they
635 * are older than the max_dead_records concept: They happen if
636 * tdb_delete happens concurrently with a traverse.
637 */
638 tdb_purge_dead(tdb, hash);
664 639 ret = tdb_allocate_from_freelist(tdb, length, rec);
665 640 tdb_unlock(tdb, -1, F_WRLCK);
666 641 return ret;
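
Before falling back to an allocation under the blocking freelist lock, the code now purges dead records for this chain; the comment notes that such records can exist even with max_dead_records == 0, because tdb_delete() running concurrently with a traverse leaves records marked dead rather than freed. The helper added above simply forces a full trim by zeroing the limit around tdb_trim_dead(); the same save/zero/restore shape in isolation, with illustrative names only:

#include <stdio.h>

/* Illustrative stand-ins for tdb->max_dead_records and tdb_trim_dead(). */
struct ctx {
    int max_dead_records;
};

static void trim_dead(struct ctx *c)
{
    /* A limit of 0 means every dead record gets reclaimed here. */
    printf("trimming down to at most %d dead records\n",
           c->max_dead_records);
}

/* Same shape as the new tdb_purge_dead(): drop the limit to zero so the
 * trim pass removes everything, then restore the configured value. */
static void purge_dead(struct ctx *c)
{
    int saved = c->max_dead_records;

    c->max_dead_records = 0;
    trim_dead(c);
    c->max_dead_records = saved;
}

int main(void)
{
    struct ctx c = { .max_dead_records = 5 };

    purge_dead(&c);
    printf("limit restored to %d\n", c.max_dead_records);
    return 0;
}
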
diff --git a/common/io.c b/common/io.c
index 94b3163..df46017 100644
--- a/common/io.c
+++ b/common/io.c
@@ -96,7 +96,7 @@ static int tdb_ftruncate(struct tdb_context *tdb, off_t length)
96 96 return ret;
97 97 }
98 98
99 #if HAVE_POSIX_FALLOCATE
99 #ifdef HAVE_POSIX_FALLOCATE
100 100 static int tdb_posix_fallocate(struct tdb_context *tdb, off_t offset,
101 101 off_t len)
102 102 {
@@ -413,7 +413,7 @@ static int tdb_expand_file(struct tdb_context *tdb, tdb_off_t size, tdb_off_t ad
413 413 return -1;
414 414 }
415 415
416 #if HAVE_POSIX_FALLOCATE
416 #ifdef HAVE_POSIX_FALLOCATE
417 417 ret = tdb_posix_fallocate(tdb, size, addition);
418 418 if (ret == 0) {
419 419 return 0;
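
Both hunks in io.c switch the posix_fallocate guard from #if to #ifdef. With #if, an identifier that was never defined silently evaluates to 0 (and trips -Wundef), so the test is only reliable if the build system always defines HAVE_POSIX_FALLOCATE to 0 or 1; #ifdef merely asks whether the macro is defined at all, which matches a configure style that defines feature macros only when the feature exists. A small, self-contained illustration (HAVE_FEATURE is a made-up macro left deliberately undefined):

#include <stdio.h>

/* HAVE_FEATURE is intentionally not defined anywhere. */

int main(void)
{
#if HAVE_FEATURE            /* an undefined identifier evaluates to 0 */
    puts("#if branch taken");
#else
    puts("#if branch skipped: the undefined macro counts as 0");
#endif

#ifdef HAVE_FEATURE         /* only true if the macro is defined at all */
    puts("#ifdef branch taken");
#else
    puts("#ifdef branch skipped: the macro is not defined");
#endif
    return 0;
}

When the config header does define the macro to 1, both forms behave identically; the difference only shows up when it is left undefined.
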
diff --git a/common/lock.c b/common/lock.c
index 9f30c7a..f55184d 100644
--- a/common/lock.c
+++ b/common/lock.c
@@ -149,8 +149,8 @@ static int fcntl_unlock(struct tdb_context *tdb, int rw, off_t off, off_t len)
149 149 * This is the memory layout of the hashchain array:
150 150 *
151 151 * FREELIST_TOP + 0 = freelist
152 * FREELIST_TOP + 4 = hashtbale list 0
152 * FREELIST_TOP + 4 = hashtable list 0
153 * FREELIST_TOP + 8 = hashtbale list 1
153 * FREELIST_TOP + 8 = hashtable list 1
154 154 * ...
155 155 *
156 156 * Otoh lock_offset computes:
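
The corrected comment documents the 4-byte slot layout of the hashchain array: the freelist at FREELIST_TOP + 0 and hash chain N at FREELIST_TOP + 4 * (N + 1). Purely as a sketch of that mapping (the real FREELIST_TOP value and lock_offset() live in tdb's private headers; the constant below is made up, and the freelist is addressed as list -1 elsewhere in this diff):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define FREELIST_TOP 0x100u     /* illustrative value only */

/* Byte offset of the lock slot for a chain, following the layout in the
 * comment above: list -1 is the freelist, list N is hash chain N. */
static uint32_t chain_lock_offset(int list)
{
    return FREELIST_TOP + 4u * (uint32_t)(list + 1);
}

int main(void)
{
    printf("freelist lock at 0x%" PRIx32 "\n", chain_lock_offset(-1));
    printf("hash chain 0 at  0x%" PRIx32 "\n", chain_lock_offset(0));
    printf("hash chain 1 at  0x%" PRIx32 "\n", chain_lock_offset(1));
    return 0;
}
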
diff --git a/common/open.c b/common/open.c
index 8baa7e4..dd5783e 100644
--- a/common/open.c
+++ b/common/open.c
@@ -230,8 +230,6 @@ static bool check_header_hash(struct tdb_context *tdb,
230 230 static bool tdb_mutex_open_ok(struct tdb_context *tdb,
231 231 const struct tdb_header *header)
232 232 {
233 int locked;
234
235 233 if (tdb->flags & TDB_NOLOCK) {
236 234 /*
237 235 * We don't look at locks, so it does not matter to have a
@@ -240,37 +238,6 @@ static bool tdb_mutex_open_ok(struct tdb_context *tdb,
240 238 return true;
241 239 }
242 240
243 locked = tdb_nest_lock(tdb, ACTIVE_LOCK, F_WRLCK,
244 TDB_LOCK_NOWAIT|TDB_LOCK_PROBE);
245
246 if ((locked == -1) && (tdb->ecode == TDB_ERR_LOCK)) {
247 /*
248 * CLEAR_IF_FIRST still active. The tdb was created on this
249 * host, so we can assume the mutex implementation is
250 * compatible. Important for tools like tdbdump on a still
251 * open locking.tdb.
252 */
253 goto check_local_settings;
254 }
255
256 /*
257 * We got the CLEAR_IF_FIRST lock. That means the database was
258 * potentially copied from somewhere else. The mutex implementation
259 * might be incompatible.
260 */
261
262 if (tdb_nest_unlock(tdb, ACTIVE_LOCK, F_WRLCK, false) == -1) {
263 /*
264 * Should not happen
265 */
266 TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok: "
267 "failed to release ACTIVE_LOCK on %s: %s\n",
268 tdb->name, strerror(errno)));
269 return false;
270 }
271
272check_local_settings:
273
274 241 if (!(tdb->flags & TDB_MUTEX_LOCKING)) {
275 242 TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok[%s]: "
276 243 "Can use mutexes only with "
@@ -281,10 +248,10 @@ check_local_settings:
281 248
282 249 if (tdb_mutex_size(tdb) != header->mutex_size) {
283 250 TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok[%s]: "
284 "Mutex size changed from %u to %u\n.",
251 "Mutex size changed from %"PRIu32" to %zu\n.",
285 252 tdb->name,
286 (unsigned int)header->mutex_size,
253 header->mutex_size,
287 (unsigned int)tdb_mutex_size(tdb)));
254 tdb_mutex_size(tdb)));
288 255 return false;
289 256 }
290 257
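
The log call now uses conversion specifiers that match the argument types instead of casting both values to unsigned int: PRIu32 from <inttypes.h> for the fixed-width uint32_t field read from the header, and %zu for the size value computed at runtime. A minimal example of the same pattern:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint32_t stored = 4096;                 /* e.g. a fixed-width on-disk field */
    size_t computed = sizeof(int) * 1024;   /* e.g. a runtime size */

    /* PRIu32 expands to the right specifier for uint32_t on this
     * platform; %zu is the standard specifier for size_t. */
    printf("Mutex size changed from %" PRIu32 " to %zu\n",
           stored, computed);
    return 0;
}
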
@@ -297,7 +264,9 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td
297 264 tdb_hash_func hash_fn)
298 265 {
299 266 int orig_errno = errno;
300 struct tdb_header header;
267 struct tdb_header header = {
268 .version = 0,
269 };
301 270 struct tdb_context *tdb;
302 271 struct stat st;
303 272 int rev = 0;
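
Initializing header with a designated initializer replaces the ZERO_STRUCT(header) call removed in the next hunk: in C, an initializer that names only some members zero-initializes all the others, so = { .version = 0, } leaves the entire struct zeroed without a separate memset-style macro. For example (a made-up struct, not the real tdb_header):

#include <stdint.h>
#include <stdio.h>

struct header {
    uint32_t version;
    uint32_t hash_size;
    uint32_t mutex_size;
};

int main(void)
{
    /* Naming one member zero-initializes every other member too. */
    struct header h = {
        .version = 0,
    };

    printf("version=%u hash_size=%u mutex_size=%u\n",
           (unsigned)h.version, (unsigned)h.hash_size,
           (unsigned)h.mutex_size);
    return 0;
}
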
@@ -309,8 +278,6 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td
309 278 uint32_t magic1, magic2;
310 279 int ret;
311 280
312 ZERO_STRUCT(header);
313
314 281 if (!(tdb = (struct tdb_context *)calloc(1, sizeof *tdb))) {
315 282 /* Can't log this */
316 283 errno = ENOMEM;
@@ -418,14 +385,6 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td
418 385 * the runtime check for existing tdb's comes later.
419 386 */
420 387
421 if (!(tdb->flags & TDB_CLEAR_IF_FIRST)) {
422 TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_open_ex: "
423 "invalid flags for %s - TDB_MUTEX_LOCKING "
424 "requires TDB_CLEAR_IF_FIRST\n", name));
425 errno = EINVAL;
426 goto fail;
427 }
428
429 388 if (tdb->flags & TDB_INTERNAL) {
430 389 TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_open_ex: "
431 390 "invalid flags for %s - TDB_MUTEX_LOCKING and "
@@ -634,6 +593,30 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td
634 593 * mutex locking.
635 594 */
636 595 tdb->hdr_ofs = header.mutex_size;
596
597 if ((!(tdb_flags & TDB_CLEAR_IF_FIRST)) && (!tdb->read_only)) {
598 /*
599 * Open an existing mutexed tdb, but without
600 * CLEAR_IF_FIRST. We need to initialize the
601 * mutex array and keep the CLEAR_IF_FIRST
602 * lock locked.
603 */
604 ret = tdb_nest_lock(tdb, ACTIVE_LOCK, F_WRLCK,
605 TDB_LOCK_NOWAIT|TDB_LOCK_PROBE);
606 locked = (ret == 0);
607
608 if (locked) {
609 ret = tdb_mutex_init(tdb);
610 if (ret == -1) {
611 TDB_LOG((tdb,
612 TDB_DEBUG_FATAL,
613 "tdb_open_ex: tdb_mutex_init "
614 "failed for ""%s: %s\n",
615 name, strerror(errno)));
616 goto fail;
617 }
618 }
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches