Merge ~ahasenack/ubuntu/+source/tdb:disco-tdb-1.3.18 into ubuntu/+source/tdb:ubuntu/devel
- Git
- lp:~ahasenack/ubuntu/+source/tdb
- disco-tdb-1.3.18
- Merge into ubuntu/devel
Status: | Merged | ||||
---|---|---|---|---|---|
Approved by: | Andreas Hasenack | ||||
Approved revision: | cbfc32075322ceab4847ec1d185429090232c13b | ||||
Merged at revision: | cbfc32075322ceab4847ec1d185429090232c13b | ||||
Proposed branch: | ~ahasenack/ubuntu/+source/tdb:disco-tdb-1.3.18 | ||||
Merge into: | ubuntu/+source/tdb:ubuntu/devel | ||||
Diff against target: |
67291 lines (+42355/-1419) 258 files modified
ABI/tdb-1.3.17.sigs (+73/-0) ABI/tdb-1.3.18.sigs (+73/-0) Makefile (+3/-1) _tdb_text.py (+0/-1) buildtools/bin/waf (+111/-21) buildtools/examples/run_on_target.py (+1/-1) buildtools/scripts/abi_gen.sh (+1/-1) buildtools/wafsamba/configure_file.py (+4/-2) buildtools/wafsamba/generic_cc.py (+51/-52) buildtools/wafsamba/pkgconfig.py (+2/-2) buildtools/wafsamba/samba3.py (+4/-5) buildtools/wafsamba/samba_abi.py (+22/-14) buildtools/wafsamba/samba_autoconf.py (+99/-73) buildtools/wafsamba/samba_autoproto.py (+3/-3) buildtools/wafsamba/samba_bundled.py (+11/-11) buildtools/wafsamba/samba_conftests.py (+34/-31) buildtools/wafsamba/samba_cross.py (+14/-13) buildtools/wafsamba/samba_deps.py (+23/-14) buildtools/wafsamba/samba_dist.py (+42/-9) buildtools/wafsamba/samba_git.py (+1/-1) buildtools/wafsamba/samba_headers.py (+7/-6) buildtools/wafsamba/samba_install.py (+6/-5) buildtools/wafsamba/samba_patterns.py (+10/-3) buildtools/wafsamba/samba_perl.py (+8/-5) buildtools/wafsamba/samba_pidl.py (+18/-12) buildtools/wafsamba/samba_python.py (+31/-21) buildtools/wafsamba/samba_third_party.py (+7/-7) buildtools/wafsamba/samba_utils.py (+178/-111) buildtools/wafsamba/samba_version.py (+8/-5) buildtools/wafsamba/samba_waf18.py (+429/-0) buildtools/wafsamba/samba_wildcard.py (+8/-9) buildtools/wafsamba/stale_files.py (+4/-2) buildtools/wafsamba/symbols.py (+19/-17) buildtools/wafsamba/test_duplicate_symbol.sh (+1/-1) buildtools/wafsamba/tests/test_abi.py (+15/-1) buildtools/wafsamba/wafsamba.py (+48/-118) buildtools/wafsamba/wscript (+47/-58) common/dump.c (+10/-1) common/freelist.c (+34/-59) common/io.c (+2/-2) common/lock.c (+2/-2) common/open.c (+48/-55) common/summary.c (+8/-0) common/tdb.c (+170/-105) common/tdb_private.h (+11/-2) common/traverse.c (+110/-6) configure (+1/-1) debian/changelog (+15/-0) debian/libtdb-dev.install (+1/-0) debian/libtdb1.symbols (+4/-0) debian/patches/40_test_transaction_expand_non_fatal.diff (+4/-4) debian/rules (+2/-7) dev/null (+0/-129) 
include/tdb.h (+68/-1) lib/replace/Makefile (+2/-1) lib/replace/README (+1/-0) lib/replace/configure (+1/-1) lib/replace/getifaddrs.c (+1/-1) lib/replace/replace.c (+109/-0) lib/replace/replace.h (+52/-27) lib/replace/snprintf.c (+72/-72) lib/replace/system/capability.h (+2/-0) lib/replace/system/dir.h (+2/-2) lib/replace/system/filesys.h (+4/-12) lib/replace/system/gssapi.h (+6/-6) lib/replace/system/kerberos.h (+2/-2) lib/replace/system/readline.h (+1/-1) lib/replace/system/threads.h (+27/-0) lib/replace/wscript (+71/-18) pytdb.c (+34/-18) python/tdbdump.py (+1/-1) python/tests/simple.py (+1/-1) test/run-circular-chain.c (+42/-0) test/run-circular-freelist.c (+50/-0) test/run-marklock-deadlock.c (+1/-1) test/run-mutex-openflags2.c (+0/-7) test/run-traverse-chain.c (+94/-0) test/test_tdbbackup.sh (+54/-0) third_party/waf/waflib/Build.py (+1474/-0) third_party/waf/waflib/ConfigSet.py (+361/-0) third_party/waf/waflib/Configure.py (+638/-0) third_party/waf/waflib/Context.py (+737/-0) third_party/waf/waflib/Errors.py (+68/-0) third_party/waf/waflib/Logs.py (+379/-0) third_party/waf/waflib/Node.py (+970/-0) third_party/waf/waflib/Options.py (+342/-0) third_party/waf/waflib/Runner.py (+586/-0) third_party/waf/waflib/Scripting.py (+613/-0) third_party/waf/waflib/Task.py (+1281/-0) third_party/waf/waflib/TaskGen.py (+917/-0) third_party/waf/waflib/Tools/__init__.py (+1/-1) third_party/waf/waflib/Tools/ar.py (+24/-0) third_party/waf/waflib/Tools/asm.py (+73/-0) third_party/waf/waflib/Tools/bison.py (+49/-0) third_party/waf/waflib/Tools/c.py (+39/-0) third_party/waf/waflib/Tools/c_aliases.py (+144/-0) third_party/waf/waflib/Tools/c_config.py (+1352/-0) third_party/waf/waflib/Tools/c_osx.py (+193/-0) third_party/waf/waflib/Tools/c_preproc.py (+1091/-0) third_party/waf/waflib/Tools/c_tests.py (+229/-0) third_party/waf/waflib/Tools/ccroot.py (+775/-0) third_party/waf/waflib/Tools/clang.py (+29/-0) third_party/waf/waflib/Tools/clangxx.py (+30/-0) 
third_party/waf/waflib/Tools/compiler_c.py (+110/-0) third_party/waf/waflib/Tools/compiler_cxx.py (+111/-0) third_party/waf/waflib/Tools/compiler_d.py (+85/-0) third_party/waf/waflib/Tools/compiler_fc.py (+73/-0) third_party/waf/waflib/Tools/cs.py (+211/-0) third_party/waf/waflib/Tools/cxx.py (+40/-0) third_party/waf/waflib/Tools/d.py (+97/-0) third_party/waf/waflib/Tools/d_config.py (+64/-0) third_party/waf/waflib/Tools/d_scan.py (+211/-0) third_party/waf/waflib/Tools/dbus.py (+70/-0) third_party/waf/waflib/Tools/dmd.py (+80/-0) third_party/waf/waflib/Tools/errcheck.py (+237/-0) third_party/waf/waflib/Tools/fc.py (+187/-0) third_party/waf/waflib/Tools/fc_config.py (+488/-0) third_party/waf/waflib/Tools/fc_scan.py (+114/-0) third_party/waf/waflib/Tools/flex.py (+62/-0) third_party/waf/waflib/Tools/g95.py (+66/-0) third_party/waf/waflib/Tools/gas.py (+18/-0) third_party/waf/waflib/Tools/gcc.py (+156/-0) third_party/waf/waflib/Tools/gdc.py (+55/-0) third_party/waf/waflib/Tools/gfortran.py (+93/-0) third_party/waf/waflib/Tools/glib2.py (+489/-0) third_party/waf/waflib/Tools/gnu_dirs.py (+131/-0) third_party/waf/waflib/Tools/gxx.py (+157/-0) third_party/waf/waflib/Tools/icc.py (+30/-0) third_party/waf/waflib/Tools/icpc.py (+30/-0) third_party/waf/waflib/Tools/ifort.py (+413/-0) third_party/waf/waflib/Tools/intltool.py (+231/-0) third_party/waf/waflib/Tools/irixcc.py (+66/-0) third_party/waf/waflib/Tools/javaw.py (+464/-0) third_party/waf/waflib/Tools/ldc2.py (+56/-0) third_party/waf/waflib/Tools/lua.py (+38/-0) third_party/waf/waflib/Tools/md5_tstamp.py (+39/-0) third_party/waf/waflib/Tools/msvc.py (+1020/-0) third_party/waf/waflib/Tools/nasm.py (+26/-0) third_party/waf/waflib/Tools/nobuild.py (+24/-0) third_party/waf/waflib/Tools/perl.py (+156/-0) third_party/waf/waflib/Tools/python.py (+627/-0) third_party/waf/waflib/Tools/qt5.py (+796/-0) third_party/waf/waflib/Tools/ruby.py (+186/-0) third_party/waf/waflib/Tools/suncc.py (+67/-0) 
third_party/waf/waflib/Tools/suncxx.py (+67/-0) third_party/waf/waflib/Tools/tex.py (+543/-0) third_party/waf/waflib/Tools/vala.py (+355/-0) third_party/waf/waflib/Tools/waf_unit_test.py (+296/-0) third_party/waf/waflib/Tools/winres.py (+78/-0) third_party/waf/waflib/Tools/xlc.py (+65/-0) third_party/waf/waflib/Tools/xlcxx.py (+65/-0) third_party/waf/waflib/Utils.py (+1021/-0) third_party/waf/waflib/__init__.py (+1/-1) third_party/waf/waflib/ansiterm.py (+342/-0) third_party/waf/waflib/extras/__init__.py (+3/-0) third_party/waf/waflib/extras/batched_cc.py (+173/-0) third_party/waf/waflib/extras/biber.py (+58/-0) third_party/waf/waflib/extras/bjam.py (+128/-0) third_party/waf/waflib/extras/blender.py (+108/-0) third_party/waf/waflib/extras/boo.py (+81/-0) third_party/waf/waflib/extras/boost.py (+525/-0) third_party/waf/waflib/extras/build_file_tracker.py (+28/-0) third_party/waf/waflib/extras/build_logs.py (+110/-0) third_party/waf/waflib/extras/buildcopy.py (+82/-0) third_party/waf/waflib/extras/c_bgxlc.py (+32/-0) third_party/waf/waflib/extras/c_dumbpreproc.py (+72/-0) third_party/waf/waflib/extras/c_emscripten.py (+87/-0) third_party/waf/waflib/extras/c_nec.py (+74/-0) third_party/waf/waflib/extras/cabal.py (+152/-0) third_party/waf/waflib/extras/cfg_altoptions.py (+110/-0) third_party/waf/waflib/extras/clang_compilation_database.py (+85/-0) third_party/waf/waflib/extras/codelite.py (+875/-0) third_party/waf/waflib/extras/color_gcc.py (+39/-0) third_party/waf/waflib/extras/color_rvct.py (+51/-0) third_party/waf/waflib/extras/compat15.py (+406/-0) third_party/waf/waflib/extras/cppcheck.py (+585/-0) third_party/waf/waflib/extras/cpplint.py (+222/-0) third_party/waf/waflib/extras/cross_gnu.py (+227/-0) third_party/waf/waflib/extras/cython.py (+146/-0) third_party/waf/waflib/extras/dcc.py (+72/-0) third_party/waf/waflib/extras/distnet.py (+430/-0) third_party/waf/waflib/extras/doxygen.py (+227/-0) third_party/waf/waflib/extras/dpapi.py (+87/-0) 
third_party/waf/waflib/extras/eclipse.py (+431/-0) third_party/waf/waflib/extras/erlang.py (+110/-0) third_party/waf/waflib/extras/fast_partial.py (+518/-0) third_party/waf/waflib/extras/fc_bgxlf.py (+32/-0) third_party/waf/waflib/extras/fc_cray.py (+51/-0) third_party/waf/waflib/extras/fc_nag.py (+61/-0) third_party/waf/waflib/extras/fc_nec.py (+60/-0) third_party/waf/waflib/extras/fc_open64.py (+58/-0) third_party/waf/waflib/extras/fc_pgfortran.py (+68/-0) third_party/waf/waflib/extras/fc_solstudio.py (+62/-0) third_party/waf/waflib/extras/fc_xlf.py (+63/-0) third_party/waf/waflib/extras/file_to_object.py (+137/-0) third_party/waf/waflib/extras/fluid.py (+13/-9) third_party/waf/waflib/extras/freeimage.py (+74/-0) third_party/waf/waflib/extras/fsb.py (+31/-0) third_party/waf/waflib/extras/fsc.py (+64/-0) third_party/waf/waflib/extras/gccdeps.py (+214/-0) third_party/waf/waflib/extras/gdbus.py (+87/-0) third_party/waf/waflib/extras/gob2.py (+5/-5) third_party/waf/waflib/extras/halide.py (+151/-0) third_party/waf/waflib/extras/javatest.py (+118/-0) third_party/waf/waflib/extras/kde4.py (+93/-0) third_party/waf/waflib/extras/local_rpath.py (+19/-0) third_party/waf/waflib/extras/make.py (+142/-0) third_party/waf/waflib/extras/midl.py (+69/-0) third_party/waf/waflib/extras/msvcdeps.py (+256/-0) third_party/waf/waflib/extras/msvs.py (+1048/-0) third_party/waf/waflib/extras/netcache_client.py (+390/-0) third_party/waf/waflib/extras/objcopy.py (+50/-0) third_party/waf/waflib/extras/ocaml.py (+142/-91) third_party/waf/waflib/extras/package.py (+76/-0) third_party/waf/waflib/extras/parallel_debug.py (+459/-0) third_party/waf/waflib/extras/pch.py (+148/-0) third_party/waf/waflib/extras/pep8.py (+106/-0) third_party/waf/waflib/extras/pgicc.py (+75/-0) third_party/waf/waflib/extras/pgicxx.py (+20/-0) third_party/waf/waflib/extras/proc.py (+54/-0) third_party/waf/waflib/extras/protoc.py (+243/-0) third_party/waf/waflib/extras/pyqt5.py (+241/-0) 
third_party/waf/waflib/extras/pytest.py (+225/-0) third_party/waf/waflib/extras/qnxnto.py (+72/-0) third_party/waf/waflib/extras/qt4.py (+695/-0) third_party/waf/waflib/extras/relocation.py (+85/-0) third_party/waf/waflib/extras/remote.py (+327/-0) third_party/waf/waflib/extras/resx.py (+35/-0) third_party/waf/waflib/extras/review.py (+325/-0) third_party/waf/waflib/extras/rst.py (+260/-0) third_party/waf/waflib/extras/run_do_script.py (+139/-0) third_party/waf/waflib/extras/run_m_script.py (+88/-0) third_party/waf/waflib/extras/run_py_script.py (+104/-0) third_party/waf/waflib/extras/run_r_script.py (+86/-0) third_party/waf/waflib/extras/sas.py (+71/-0) third_party/waf/waflib/extras/satellite_assembly.py (+57/-0) third_party/waf/waflib/extras/scala.py (+128/-0) third_party/waf/waflib/extras/slow_qt4.py (+96/-0) third_party/waf/waflib/extras/softlink_libs.py (+76/-0) third_party/waf/waflib/extras/stale.py (+98/-0) third_party/waf/waflib/extras/stracedeps.py (+174/-0) third_party/waf/waflib/extras/swig.py (+237/-0) third_party/waf/waflib/extras/syms.py (+84/-0) third_party/waf/waflib/extras/ticgt.py (+300/-0) third_party/waf/waflib/extras/unity.py (+108/-0) third_party/waf/waflib/extras/use_config.py (+185/-0) third_party/waf/waflib/extras/valadoc.py (+127/-99) third_party/waf/waflib/extras/waf_xattr.py (+150/-0) third_party/waf/waflib/extras/why.py (+78/-0) third_party/waf/waflib/extras/win32_opts.py (+170/-0) third_party/waf/waflib/extras/wix.py (+87/-0) third_party/waf/waflib/extras/xcode6.py (+727/-0) third_party/waf/waflib/fixpy2.py (+64/-0) third_party/waf/waflib/processor.py (+64/-0) tools/tdbbackup.c (+28/-7) tools/tdbdump.c (+3/-1) tools/tdbtorture.c (+17/-6) wscript (+33/-18) |
||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Christian Ehrhardt (community) | Approve | ||
Canonical Server | Pending | ||
Review via email: mp+364086@code.launchpad.net |
Commit message
Description of the change
PPA with builds: ppa:ahasenack/
https:/
Bileto ticket:
https:/
It was green, but recent uploads removed the dep8 test history. I have to copy the packages over again.
This is part of the effort to remove python2 from the desktop iso.
That requires samba to be updated to 4.10 (bug #1818518), and that update requires a new tdb, which is what this branch is about.
We are going ahead of Debian.
Upstream switched from python 2 to using python 3 by default, so that required a few changes to the ./configure call in d/rules, and more cleanups in dh_clean.
I don't know why debian is skipping generating a symbols file for the python{,3}-tdb packages (the -N option):
override_
I also don't know why debian is manually building a static library in override_
I didn't clean up the commit tree, because there has been some back and forth here about dropping or not the py2 packages entirely. Foundations wanted the python-* (py2) packages entirely dropped, but at least python-tdb is still needed by bzr-git (Recommends). I'll keep building it, but we might be asked to drop it and somehow fix bzr-git. Anyway, I think that can be done later if needed.
Andreas Hasenack (ahasenack) wrote : | # |
Christian Ehrhardt (paelzer) wrote : | # |
probably bikeshedding, but all the changes in the changelog are "due to" the New upstream version.
I'd therefore have expected to find them indented as
* New upstream version: 1.3.18 (LP: #1818520)
- d/p/40_
fuzz
- ...
up to you
Andreas Hasenack (ahasenack) wrote : | # |
I can improve that
Christian Ehrhardt (paelzer) wrote : | # |
waf cleanup is fine, but given how much you list don't you want to convert it to just
$ find ... -name .pyc -delete
or something like that?
Considering that you also clear directories maybe:
$ find -name ".pyc" -o -name "__pycache__" -exec rm {} \;
Christian Ehrhardt (paelzer) wrote : | # |
Other than the nit picks mentioned the commits LGTM, I'll go check the build log now ...
Christian Ehrhardt (paelzer) wrote : | # |
I wondered if [1] would cause trouble for the py2 packages, but after reviewing what ends up in those packages I think you are good.
[1]: https:/
Christian Ehrhardt (paelzer) wrote : | # |
I'm done parsing the upstream changes towards 1.3.18 and the build log; not much is showing up there that needs work.
The one thing I wondered is (probably unimportant):
dh_gencontrol
dpkg-gencontrol: warning: package python3-tdb: substitution variable ${python3:Versions} unused, but is defined
dpkg-gencontrol: warning: package python3-tdb: substitution variable ${python3:Versions} unused, but is defined
dpkg-gencontrol: warning: package python-tdb: substitution variable ${python:Versions} unused, but is defined
dpkg-gencontrol: warning: package python-tdb: substitution variable ${python:Versions} unused, but is defined
The field [1] is optional after all.
An entry would look like that on the py3 packages (py2 isn't needed since py2.7 is the only one)
XS-Python-Version: ${python3:Versions}
OTOH it might make future transitions easier (or harder??)
See also man dh_python3 for that.
Since I have not seen you iterating over python3 versions in d/rules it might be safer to explicitly define this to be for the current py3.7.
I have seen builds crash in -devel in times there were two py3 (e.g. 3.6&3.7) in -release, and as far as I understand the flag here it might help with that.
And adding those lines should be easier than modifying more of d/rules to build potentially multiple python modules iterating over `py3versions -vr`
[1]: https:/
Christian Ehrhardt (paelzer) wrote : | # |
All that said, nothing I found is critical - the changes LGTM.
Do you want to change the MP state to ack now or only after a re-review once you are finished?
(I must admit I squashed the commits for reviewability)
Andreas Hasenack (ahasenack) wrote : | # |
I'll look at the python issues you pointed at, and maybe use that for the other packages too. Let's leave the MP state as is for now.
Andreas Hasenack (ahasenack) wrote : | # |
Even when using X-Python3-Version: ${python3:Versions} in d/control, I still get the warning, specifically in the binary packages. I'll remove it for now, pending a better understanding of how this works.
Andreas Hasenack (ahasenack) wrote : | # |
- changelog updated
- I also squashed the revert commit I had
tdb will build py2 and py3 packages. py2 because of bzr-git, even though the FFe bug said it would have been ok to drop python-tdb (py2). We can still do that later.
I'll move on to the other MPs now, get all consistent.
Andreas Hasenack (ahasenack) wrote : | # |
Tagged and uploaded:
$ git push pkg upload/
Enumerating objects: 395, done.
Counting objects: 100% (395/395), done.
Delta compression using up to 4 threads
Compressing objects: 100% (208/208), done.
Writing objects: 100% (301/301), 438.24 KiB | 245.00 KiB/s, done.
Total 301 (delta 101), reused 277 (delta 86)
To ssh://git.
* [new tag] upload/
$ dput ubuntu ../tdb_
Checking signature on .changes
gpg: ../tdb_
Checking signature on .dsc
gpg: ../tdb_
Uploading to ubuntu (via ftp to upload.ubuntu.com):
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Successfully uploaded packages.
Andreas Hasenack (ahasenack) wrote : | # |
Now uploading with the orig tarball included:
$ dput -f ubuntu ../tdb_
Checking signature on .changes
gpg: ../tdb_
Checking signature on .dsc
gpg: ../tdb_
Uploading to ubuntu (via ftp to upload.ubuntu.com):
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Successfully uploaded packages.
Preview Diff
1 | diff --git a/ABI/tdb-1.3.17.sigs b/ABI/tdb-1.3.17.sigs |
2 | new file mode 100644 |
3 | index 0000000..e2b0427 |
4 | --- /dev/null |
5 | +++ b/ABI/tdb-1.3.17.sigs |
6 | @@ -0,0 +1,73 @@ |
7 | +tdb_add_flags: void (struct tdb_context *, unsigned int) |
8 | +tdb_append: int (struct tdb_context *, TDB_DATA, TDB_DATA) |
9 | +tdb_chainlock: int (struct tdb_context *, TDB_DATA) |
10 | +tdb_chainlock_mark: int (struct tdb_context *, TDB_DATA) |
11 | +tdb_chainlock_nonblock: int (struct tdb_context *, TDB_DATA) |
12 | +tdb_chainlock_read: int (struct tdb_context *, TDB_DATA) |
13 | +tdb_chainlock_read_nonblock: int (struct tdb_context *, TDB_DATA) |
14 | +tdb_chainlock_unmark: int (struct tdb_context *, TDB_DATA) |
15 | +tdb_chainunlock: int (struct tdb_context *, TDB_DATA) |
16 | +tdb_chainunlock_read: int (struct tdb_context *, TDB_DATA) |
17 | +tdb_check: int (struct tdb_context *, int (*)(TDB_DATA, TDB_DATA, void *), void *) |
18 | +tdb_close: int (struct tdb_context *) |
19 | +tdb_delete: int (struct tdb_context *, TDB_DATA) |
20 | +tdb_dump_all: void (struct tdb_context *) |
21 | +tdb_enable_seqnum: void (struct tdb_context *) |
22 | +tdb_error: enum TDB_ERROR (struct tdb_context *) |
23 | +tdb_errorstr: const char *(struct tdb_context *) |
24 | +tdb_exists: int (struct tdb_context *, TDB_DATA) |
25 | +tdb_fd: int (struct tdb_context *) |
26 | +tdb_fetch: TDB_DATA (struct tdb_context *, TDB_DATA) |
27 | +tdb_firstkey: TDB_DATA (struct tdb_context *) |
28 | +tdb_freelist_size: int (struct tdb_context *) |
29 | +tdb_get_flags: int (struct tdb_context *) |
30 | +tdb_get_logging_private: void *(struct tdb_context *) |
31 | +tdb_get_seqnum: int (struct tdb_context *) |
32 | +tdb_hash_size: int (struct tdb_context *) |
33 | +tdb_increment_seqnum_nonblock: void (struct tdb_context *) |
34 | +tdb_jenkins_hash: unsigned int (TDB_DATA *) |
35 | +tdb_lock_nonblock: int (struct tdb_context *, int, int) |
36 | +tdb_lockall: int (struct tdb_context *) |
37 | +tdb_lockall_mark: int (struct tdb_context *) |
38 | +tdb_lockall_nonblock: int (struct tdb_context *) |
39 | +tdb_lockall_read: int (struct tdb_context *) |
40 | +tdb_lockall_read_nonblock: int (struct tdb_context *) |
41 | +tdb_lockall_unmark: int (struct tdb_context *) |
42 | +tdb_log_fn: tdb_log_func (struct tdb_context *) |
43 | +tdb_map_size: size_t (struct tdb_context *) |
44 | +tdb_name: const char *(struct tdb_context *) |
45 | +tdb_nextkey: TDB_DATA (struct tdb_context *, TDB_DATA) |
46 | +tdb_null: dptr = 0xXXXX, dsize = 0 |
47 | +tdb_open: struct tdb_context *(const char *, int, int, int, mode_t) |
48 | +tdb_open_ex: struct tdb_context *(const char *, int, int, int, mode_t, const struct tdb_logging_context *, tdb_hash_func) |
49 | +tdb_parse_record: int (struct tdb_context *, TDB_DATA, int (*)(TDB_DATA, TDB_DATA, void *), void *) |
50 | +tdb_printfreelist: int (struct tdb_context *) |
51 | +tdb_remove_flags: void (struct tdb_context *, unsigned int) |
52 | +tdb_reopen: int (struct tdb_context *) |
53 | +tdb_reopen_all: int (int) |
54 | +tdb_repack: int (struct tdb_context *) |
55 | +tdb_rescue: int (struct tdb_context *, void (*)(TDB_DATA, TDB_DATA, void *), void *) |
56 | +tdb_runtime_check_for_robust_mutexes: bool (void) |
57 | +tdb_set_logging_function: void (struct tdb_context *, const struct tdb_logging_context *) |
58 | +tdb_set_max_dead: void (struct tdb_context *, int) |
59 | +tdb_setalarm_sigptr: void (struct tdb_context *, volatile sig_atomic_t *) |
60 | +tdb_store: int (struct tdb_context *, TDB_DATA, TDB_DATA, int) |
61 | +tdb_storev: int (struct tdb_context *, TDB_DATA, const TDB_DATA *, int, int) |
62 | +tdb_summary: char *(struct tdb_context *) |
63 | +tdb_transaction_active: bool (struct tdb_context *) |
64 | +tdb_transaction_cancel: int (struct tdb_context *) |
65 | +tdb_transaction_commit: int (struct tdb_context *) |
66 | +tdb_transaction_prepare_commit: int (struct tdb_context *) |
67 | +tdb_transaction_start: int (struct tdb_context *) |
68 | +tdb_transaction_start_nonblock: int (struct tdb_context *) |
69 | +tdb_transaction_write_lock_mark: int (struct tdb_context *) |
70 | +tdb_transaction_write_lock_unmark: int (struct tdb_context *) |
71 | +tdb_traverse: int (struct tdb_context *, tdb_traverse_func, void *) |
72 | +tdb_traverse_chain: int (struct tdb_context *, unsigned int, tdb_traverse_func, void *) |
73 | +tdb_traverse_key_chain: int (struct tdb_context *, TDB_DATA, tdb_traverse_func, void *) |
74 | +tdb_traverse_read: int (struct tdb_context *, tdb_traverse_func, void *) |
75 | +tdb_unlock: int (struct tdb_context *, int, int) |
76 | +tdb_unlockall: int (struct tdb_context *) |
77 | +tdb_unlockall_read: int (struct tdb_context *) |
78 | +tdb_validate_freelist: int (struct tdb_context *, int *) |
79 | +tdb_wipe_all: int (struct tdb_context *) |
80 | diff --git a/ABI/tdb-1.3.18.sigs b/ABI/tdb-1.3.18.sigs |
81 | new file mode 100644 |
82 | index 0000000..e2b0427 |
83 | --- /dev/null |
84 | +++ b/ABI/tdb-1.3.18.sigs |
85 | @@ -0,0 +1,73 @@ |
86 | +tdb_add_flags: void (struct tdb_context *, unsigned int) |
87 | +tdb_append: int (struct tdb_context *, TDB_DATA, TDB_DATA) |
88 | +tdb_chainlock: int (struct tdb_context *, TDB_DATA) |
89 | +tdb_chainlock_mark: int (struct tdb_context *, TDB_DATA) |
90 | +tdb_chainlock_nonblock: int (struct tdb_context *, TDB_DATA) |
91 | +tdb_chainlock_read: int (struct tdb_context *, TDB_DATA) |
92 | +tdb_chainlock_read_nonblock: int (struct tdb_context *, TDB_DATA) |
93 | +tdb_chainlock_unmark: int (struct tdb_context *, TDB_DATA) |
94 | +tdb_chainunlock: int (struct tdb_context *, TDB_DATA) |
95 | +tdb_chainunlock_read: int (struct tdb_context *, TDB_DATA) |
96 | +tdb_check: int (struct tdb_context *, int (*)(TDB_DATA, TDB_DATA, void *), void *) |
97 | +tdb_close: int (struct tdb_context *) |
98 | +tdb_delete: int (struct tdb_context *, TDB_DATA) |
99 | +tdb_dump_all: void (struct tdb_context *) |
100 | +tdb_enable_seqnum: void (struct tdb_context *) |
101 | +tdb_error: enum TDB_ERROR (struct tdb_context *) |
102 | +tdb_errorstr: const char *(struct tdb_context *) |
103 | +tdb_exists: int (struct tdb_context *, TDB_DATA) |
104 | +tdb_fd: int (struct tdb_context *) |
105 | +tdb_fetch: TDB_DATA (struct tdb_context *, TDB_DATA) |
106 | +tdb_firstkey: TDB_DATA (struct tdb_context *) |
107 | +tdb_freelist_size: int (struct tdb_context *) |
108 | +tdb_get_flags: int (struct tdb_context *) |
109 | +tdb_get_logging_private: void *(struct tdb_context *) |
110 | +tdb_get_seqnum: int (struct tdb_context *) |
111 | +tdb_hash_size: int (struct tdb_context *) |
112 | +tdb_increment_seqnum_nonblock: void (struct tdb_context *) |
113 | +tdb_jenkins_hash: unsigned int (TDB_DATA *) |
114 | +tdb_lock_nonblock: int (struct tdb_context *, int, int) |
115 | +tdb_lockall: int (struct tdb_context *) |
116 | +tdb_lockall_mark: int (struct tdb_context *) |
117 | +tdb_lockall_nonblock: int (struct tdb_context *) |
118 | +tdb_lockall_read: int (struct tdb_context *) |
119 | +tdb_lockall_read_nonblock: int (struct tdb_context *) |
120 | +tdb_lockall_unmark: int (struct tdb_context *) |
121 | +tdb_log_fn: tdb_log_func (struct tdb_context *) |
122 | +tdb_map_size: size_t (struct tdb_context *) |
123 | +tdb_name: const char *(struct tdb_context *) |
124 | +tdb_nextkey: TDB_DATA (struct tdb_context *, TDB_DATA) |
125 | +tdb_null: dptr = 0xXXXX, dsize = 0 |
126 | +tdb_open: struct tdb_context *(const char *, int, int, int, mode_t) |
127 | +tdb_open_ex: struct tdb_context *(const char *, int, int, int, mode_t, const struct tdb_logging_context *, tdb_hash_func) |
128 | +tdb_parse_record: int (struct tdb_context *, TDB_DATA, int (*)(TDB_DATA, TDB_DATA, void *), void *) |
129 | +tdb_printfreelist: int (struct tdb_context *) |
130 | +tdb_remove_flags: void (struct tdb_context *, unsigned int) |
131 | +tdb_reopen: int (struct tdb_context *) |
132 | +tdb_reopen_all: int (int) |
133 | +tdb_repack: int (struct tdb_context *) |
134 | +tdb_rescue: int (struct tdb_context *, void (*)(TDB_DATA, TDB_DATA, void *), void *) |
135 | +tdb_runtime_check_for_robust_mutexes: bool (void) |
136 | +tdb_set_logging_function: void (struct tdb_context *, const struct tdb_logging_context *) |
137 | +tdb_set_max_dead: void (struct tdb_context *, int) |
138 | +tdb_setalarm_sigptr: void (struct tdb_context *, volatile sig_atomic_t *) |
139 | +tdb_store: int (struct tdb_context *, TDB_DATA, TDB_DATA, int) |
140 | +tdb_storev: int (struct tdb_context *, TDB_DATA, const TDB_DATA *, int, int) |
141 | +tdb_summary: char *(struct tdb_context *) |
142 | +tdb_transaction_active: bool (struct tdb_context *) |
143 | +tdb_transaction_cancel: int (struct tdb_context *) |
144 | +tdb_transaction_commit: int (struct tdb_context *) |
145 | +tdb_transaction_prepare_commit: int (struct tdb_context *) |
146 | +tdb_transaction_start: int (struct tdb_context *) |
147 | +tdb_transaction_start_nonblock: int (struct tdb_context *) |
148 | +tdb_transaction_write_lock_mark: int (struct tdb_context *) |
149 | +tdb_transaction_write_lock_unmark: int (struct tdb_context *) |
150 | +tdb_traverse: int (struct tdb_context *, tdb_traverse_func, void *) |
151 | +tdb_traverse_chain: int (struct tdb_context *, unsigned int, tdb_traverse_func, void *) |
152 | +tdb_traverse_key_chain: int (struct tdb_context *, TDB_DATA, tdb_traverse_func, void *) |
153 | +tdb_traverse_read: int (struct tdb_context *, tdb_traverse_func, void *) |
154 | +tdb_unlock: int (struct tdb_context *, int, int) |
155 | +tdb_unlockall: int (struct tdb_context *) |
156 | +tdb_unlockall_read: int (struct tdb_context *) |
157 | +tdb_validate_freelist: int (struct tdb_context *, int *) |
158 | +tdb_wipe_all: int (struct tdb_context *) |
159 | diff --git a/Makefile b/Makefile |
160 | index fe44ff6..8fd56c8 100644 |
161 | --- a/Makefile |
162 | +++ b/Makefile |
163 | @@ -1,6 +1,8 @@ |
164 | # simple makefile wrapper to run waf |
165 | |
166 | -WAF=WAF_MAKE=1 PATH=buildtools/bin:../../buildtools/bin:$$PATH waf |
167 | +WAF_BIN=`PATH=buildtools/bin:../../buildtools/bin:$$PATH which waf` |
168 | +WAF_BINARY=$(PYTHON) $(WAF_BIN) |
169 | +WAF=PYTHONHASHSEED=1 WAF_MAKE=1 $(WAF_BINARY) |
170 | |
171 | all: |
172 | $(WAF) build |
173 | diff --git a/_tdb_text.py b/_tdb_text.py |
174 | index c823bf8..f3caa53 100644 |
175 | --- a/_tdb_text.py |
176 | +++ b/_tdb_text.py |
177 | @@ -4,7 +4,6 @@ |
178 | # Published under the GNU LGPLv3 or later |
179 | |
180 | import sys |
181 | -import functools |
182 | |
183 | import tdb |
184 | |
185 | diff --git a/buildtools/bin/waf b/buildtools/bin/waf |
186 | index 1b0f466..3ee4d5b 100755 |
187 | --- a/buildtools/bin/waf |
188 | +++ b/buildtools/bin/waf |
189 | @@ -1,7 +1,7 @@ |
190 | -#!/usr/bin/env python |
191 | -# encoding: ISO-8859-1 |
192 | -# Thomas Nagy, 2005-2010 |
193 | - |
194 | +#!/usr/bin/env python3 |
195 | +# encoding: latin-1 |
196 | +# Thomas Nagy, 2005-2018 |
197 | +# |
198 | """ |
199 | Redistribution and use in source and binary forms, with or without |
200 | modification, are permitted provided that the following conditions |
201 | @@ -30,25 +30,24 @@ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
202 | POSSIBILITY OF SUCH DAMAGE. |
203 | """ |
204 | |
205 | -import os, sys |
206 | -if sys.hexversion<0x203000f: raise ImportError("Waf requires Python >= 2.3") |
207 | - |
208 | -if 'PSYCOWAF' in os.environ: |
209 | - try:import psyco;psyco.full() |
210 | - except:pass |
211 | +import os, sys, inspect |
212 | |
213 | -VERSION="1.5.19" |
214 | +VERSION="2.0.8" |
215 | REVISION="x" |
216 | +GIT="x" |
217 | INSTALL="x" |
218 | C1='x' |
219 | C2='x' |
220 | +C3='x' |
221 | cwd = os.getcwd() |
222 | join = os.path.join |
223 | |
224 | +if sys.hexversion<0x206000f: |
225 | + raise ImportError('Python >= 2.6 is required to create the waf file') |
226 | + |
227 | WAF='waf' |
228 | def b(x): |
229 | return x |
230 | - |
231 | if sys.hexversion>0x300000f: |
232 | WAF='waf3' |
233 | def b(x): |
234 | @@ -58,20 +57,111 @@ def err(m): |
235 | print(('\033[91mError: %s\033[0m' % m)) |
236 | sys.exit(1) |
237 | |
238 | -def test(dir): |
239 | - try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir) |
240 | +def unpack_wafdir(dir, src): |
241 | + f = open(src,'rb') |
242 | + c = 'corrupt archive (%d)' |
243 | + while 1: |
244 | + line = f.readline() |
245 | + if not line: err('run waf-light from a folder containing waflib') |
246 | + if line == b('#==>\n'): |
247 | + txt = f.readline() |
248 | + if not txt: err(c % 1) |
249 | + if f.readline() != b('#<==\n'): err(c % 2) |
250 | + break |
251 | + if not txt: err(c % 3) |
252 | + txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00')) |
253 | + |
254 | + import shutil, tarfile |
255 | + try: shutil.rmtree(dir) |
256 | except OSError: pass |
257 | + try: |
258 | + for x in ('Tools', 'extras'): |
259 | + os.makedirs(join(dir, 'waflib', x)) |
260 | + except OSError: |
261 | + err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir) |
262 | + |
263 | + os.chdir(dir) |
264 | + tmp = 't.bz2' |
265 | + t = open(tmp,'wb') |
266 | + try: t.write(txt) |
267 | + finally: t.close() |
268 | + |
269 | + try: |
270 | + t = tarfile.open(tmp) |
271 | + except: |
272 | + try: |
273 | + os.system('bunzip2 t.bz2') |
274 | + t = tarfile.open('t') |
275 | + tmp = 't' |
276 | + except: |
277 | + os.chdir(cwd) |
278 | + try: shutil.rmtree(dir) |
279 | + except OSError: pass |
280 | + err("Waf cannot be unpacked, check that bzip2 support is present") |
281 | + |
282 | + try: |
283 | + for x in t: t.extract(x) |
284 | + finally: |
285 | + t.close() |
286 | + |
287 | + for x in ('Tools', 'extras'): |
288 | + os.chmod(join('waflib',x), 493) |
289 | + |
290 | + if sys.hexversion<0x300000f: |
291 | + sys.path = [join(dir, 'waflib')] + sys.path |
292 | + import fixpy2 |
293 | + fixpy2.fixdir(dir) |
294 | + |
295 | + os.remove(tmp) |
296 | + os.chdir(cwd) |
297 | + |
298 | + try: dir = unicode(dir, 'mbcs') |
299 | + except: pass |
300 | + try: |
301 | + from ctypes import windll |
302 | + windll.kernel32.SetFileAttributesW(dir, 2) |
303 | + except: |
304 | + pass |
305 | + |
306 | +def test(dir): |
307 | + try: |
308 | + os.stat(join(dir, 'waflib')) |
309 | + return os.path.abspath(dir) |
310 | + except OSError: |
311 | + pass |
312 | |
313 | def find_lib(): |
314 | - return os.path.abspath(os.path.join(os.path.dirname(__file__), '../../third_party/waf')) |
315 | + path = '../../third_party/waf' |
316 | + paths = [path, path+'/waflib'] |
317 | + return [os.path.abspath(os.path.join(os.path.dirname(__file__), x)) for x in paths] |
318 | |
319 | wafdir = find_lib() |
320 | -w = join(wafdir, 'wafadmin') |
321 | -t = join(w, 'Tools') |
322 | -f = join(w, '3rdparty') |
323 | -sys.path = [w, t, f] + sys.path |
324 | +for p in wafdir: |
325 | + sys.path.insert(0, p) |
326 | |
327 | if __name__ == '__main__': |
328 | - import Scripting |
329 | - Scripting.prepare(t, cwd, VERSION, wafdir) |
330 | + #import extras.compat15#PRELUDE |
331 | + import sys |
332 | + |
333 | + from waflib.Tools import ccroot, c, ar, compiler_c, gcc |
334 | + sys.modules['cc'] = c |
335 | + sys.modules['ccroot'] = ccroot |
336 | + sys.modules['ar'] = ar |
337 | + sys.modules['compiler_cc'] = compiler_c |
338 | + sys.modules['gcc'] = gcc |
339 | + |
340 | + from waflib import Options |
341 | + Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript') |
342 | + if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0: |
343 | + os.environ['NOCLIMB'] = "1" |
344 | + # there is a single top-level, but libraries must build independently |
345 | + os.environ['NO_LOCK_IN_TOP'] = "1" |
346 | + |
347 | + from waflib import Task |
348 | + class o(object): |
349 | + display = None |
350 | + Task.classes['cc_link'] = o |
351 | + |
352 | + from waflib import Scripting |
353 | + Scripting.waf_entry_point(cwd, VERSION, wafdir[0]) |
354 | |
355 | diff --git a/buildtools/examples/run_on_target.py b/buildtools/examples/run_on_target.py |
356 | index 8322759..79c5730 100755 |
357 | --- a/buildtools/examples/run_on_target.py |
358 | +++ b/buildtools/examples/run_on_target.py |
359 | @@ -1,4 +1,4 @@ |
360 | -#!/usr/bin/env python |
361 | +#!/usr/bin/env python3 |
362 | |
363 | # |
364 | # Sample run-on-target script |
365 | diff --git a/buildtools/scripts/abi_gen.sh b/buildtools/scripts/abi_gen.sh |
366 | index 787718c..6dd6d32 100755 |
367 | --- a/buildtools/scripts/abi_gen.sh |
368 | +++ b/buildtools/scripts/abi_gen.sh |
369 | @@ -17,5 +17,5 @@ done |
370 | ) > $GDBSCRIPT |
371 | |
372 | # forcing the terminal avoids a problem on Fedora12 |
373 | -TERM=none gdb -batch -x $GDBSCRIPT "$SHAREDLIB" < /dev/null |
374 | +TERM=none gdb -n -batch -x $GDBSCRIPT "$SHAREDLIB" < /dev/null |
375 | rm -f $GDBSCRIPT |
376 | diff --git a/buildtools/wafsamba/configure_file.py b/buildtools/wafsamba/configure_file.py |
377 | index e28282b..6ad4354 100644 |
378 | --- a/buildtools/wafsamba/configure_file.py |
379 | +++ b/buildtools/wafsamba/configure_file.py |
380 | @@ -1,7 +1,9 @@ |
381 | # handle substitution of variables in .in files |
382 | |
383 | -import re, os |
384 | -import Build, sys, Logs |
385 | +import sys |
386 | +import re |
387 | +import os |
388 | +from waflib import Build, Logs |
389 | from samba_utils import SUBST_VARS_RECURSIVE |
390 | |
391 | def subst_at_vars(task): |
392 | diff --git a/buildtools/wafsamba/generic_cc.py b/buildtools/wafsamba/generic_cc.py |
393 | index 504e902..1352c54 100644 |
394 | --- a/buildtools/wafsamba/generic_cc.py |
395 | +++ b/buildtools/wafsamba/generic_cc.py |
396 | @@ -3,69 +3,68 @@ |
397 | # based on suncc.py from waf |
398 | |
399 | import os, optparse |
400 | -import Utils, Options, Configure |
401 | -import ccroot, ar |
402 | -from Configure import conftest |
403 | +from waflib import Errors |
404 | +from waflib.Tools import ccroot, ar |
405 | +from waflib.Configure import conf |
406 | |
407 | -from compiler_cc import c_compiler |
408 | +# |
409 | +# Let waflib provide useful defaults, but |
410 | +# provide generic_cc as last resort fallback on |
411 | +# all platforms |
412 | +# |
413 | +from waflib.Tools.compiler_c import c_compiler |
414 | +for key in c_compiler.keys(): |
415 | + c_compiler[key].append('generic_cc') |
416 | |
417 | -c_compiler['default'] = ['gcc', 'generic_cc'] |
418 | -c_compiler['hpux'] = ['gcc', 'generic_cc'] |
419 | - |
420 | -@conftest |
421 | +@conf |
422 | def find_generic_cc(conf): |
423 | v = conf.env |
424 | cc = None |
425 | - if v['CC']: cc = v['CC'] |
426 | - elif 'CC' in conf.environ: cc = conf.environ['CC'] |
427 | - if not cc: cc = conf.find_program('cc', var='CC') |
428 | - if not cc: conf.fatal('generic_cc was not found') |
429 | - cc = conf.cmd_to_list(cc) |
430 | - v['CC'] = cc |
431 | - v['CC_NAME'] = 'generic' |
432 | - |
433 | -@conftest |
434 | -def generic_cc_common_flags(conf): |
435 | - v = conf.env |
436 | + if v.CC: |
437 | + cc = v.CC |
438 | + elif 'CC' in conf.environ: |
439 | + cc = conf.environ['CC'] |
440 | + if not cc: |
441 | + cc = conf.find_program('cc', var='CC') |
442 | + if not cc: |
443 | + conf.fatal('generic_cc was not found') |
444 | |
445 | - v['CC_SRC_F'] = '' |
446 | - v['CC_TGT_F'] = ['-c', '-o', ''] |
447 | - v['CPPPATH_ST'] = '-I%s' # template for adding include paths |
448 | + try: |
449 | + conf.cmd_and_log(cc + ['--version']) |
450 | + except Errors.WafError: |
451 | + conf.fatal('%r --version could not be executed' % cc) |
452 | |
453 | - # linker |
454 | - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] |
455 | - v['CCLNK_SRC_F'] = '' |
456 | - v['CCLNK_TGT_F'] = ['-o', ''] |
457 | + v.CC = cc |
458 | + v.CC_NAME = 'generic_cc' |
459 | |
460 | - v['LIB_ST'] = '-l%s' # template for adding libs |
461 | - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths |
462 | - v['STATICLIB_ST'] = '-l%s' |
463 | - v['STATICLIBPATH_ST'] = '-L%s' |
464 | - v['CCDEFINES_ST'] = '-D%s' |
465 | +@conf |
466 | +def generic_cc_common_flags(conf): |
467 | + v = conf.env |
468 | |
469 | -# v['SONAME_ST'] = '-Wl,-h -Wl,%s' |
470 | -# v['SHLIB_MARKER'] = '-Bdynamic' |
471 | -# v['STATICLIB_MARKER'] = '-Bstatic' |
472 | + v.CC_SRC_F = '' |
473 | + v.CC_TGT_F = ['-c', '-o'] |
474 | + v.CPPPATH_ST = '-I%s' |
475 | + v.DEFINES_ST = '-D%s' |
476 | |
477 | - # program |
478 | - v['program_PATTERN'] = '%s' |
479 | + if not v.LINK_CC: |
480 | + v.LINK_CC = v.CC |
481 | |
482 | - # shared library |
483 | -# v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] |
484 | -# v['shlib_LINKFLAGS'] = ['-G'] |
485 | - v['shlib_PATTERN'] = 'lib%s.so' |
486 | + v.CCLNK_SRC_F = '' |
487 | + v.CCLNK_TGT_F = ['-o'] |
488 | |
489 | - # static lib |
490 | -# v['staticlib_LINKFLAGS'] = ['-Bstatic'] |
491 | -# v['staticlib_PATTERN'] = 'lib%s.a' |
492 | + v.LIB_ST = '-l%s' # template for adding libs |
493 | + v.LIBPATH_ST = '-L%s' # template for adding libpaths |
494 | + v.STLIB_ST = '-l%s' |
495 | + v.STLIBPATH_ST = '-L%s' |
496 | |
497 | -detect = ''' |
498 | -find_generic_cc |
499 | -find_cpp |
500 | -find_ar |
501 | -generic_cc_common_flags |
502 | -cc_load_tools |
503 | -cc_add_flags |
504 | -link_add_flags |
505 | -''' |
506 | + v.cprogram_PATTERN = '%s' |
507 | + v.cshlib_PATTERN = 'lib%s.so' |
508 | + v.cstlib_PATTERN = 'lib%s.a' |
509 | |
510 | +def configure(conf): |
511 | + conf.find_generic_cc() |
512 | + conf.find_ar() |
513 | + conf.generic_cc_common_flags() |
514 | + conf.cc_load_tools() |
515 | + conf.cc_add_flags() |
516 | + conf.link_add_flags() |
517 | diff --git a/buildtools/wafsamba/hpuxcc.py b/buildtools/wafsamba/hpuxcc.py |
518 | deleted file mode 100644 |
519 | index c263556..0000000 |
520 | --- a/buildtools/wafsamba/hpuxcc.py |
521 | +++ /dev/null |
522 | @@ -1,56 +0,0 @@ |
523 | -# compiler definition for HPUX |
524 | -# based on suncc.py from waf |
525 | - |
526 | -import os, optparse, sys |
527 | -import Utils, Options, Configure |
528 | -import ccroot, ar |
529 | -from Configure import conftest |
530 | -import gcc |
531 | - |
532 | - |
533 | -@conftest |
534 | -def gcc_modifier_hpux(conf): |
535 | - v=conf.env |
536 | - v['CCFLAGS_DEBUG']=['-g'] |
537 | - v['CCFLAGS_RELEASE']=['-O2'] |
538 | - v['CC_SRC_F']='' |
539 | - v['CC_TGT_F']=['-c','-o',''] |
540 | - v['CPPPATH_ST']='-I%s' |
541 | - if not v['LINK_CC']:v['LINK_CC']=v['CC'] |
542 | - v['CCLNK_SRC_F']='' |
543 | - v['CCLNK_TGT_F']=['-o',''] |
544 | - v['LIB_ST']='-l%s' |
545 | - v['LIBPATH_ST']='-L%s' |
546 | - v['STATICLIB_ST']='-l%s' |
547 | - v['STATICLIBPATH_ST']='-L%s' |
548 | - v['RPATH_ST']='-Wl,-rpath,%s' |
549 | - v['CCDEFINES_ST']='-D%s' |
550 | - v['SONAME_ST']='-Wl,-h,%s' |
551 | - v['SHLIB_MARKER']=[] |
552 | -# v['STATICLIB_MARKER']='-Wl,-Bstatic' |
553 | - v['FULLSTATIC_MARKER']='-static' |
554 | - v['program_PATTERN']='%s' |
555 | - v['shlib_CCFLAGS']=['-fPIC','-DPIC'] |
556 | - v['shlib_LINKFLAGS']=['-shared'] |
557 | - v['shlib_PATTERN']='lib%s.sl' |
558 | -# v['staticlib_LINKFLAGS']=['-Wl,-Bstatic'] |
559 | - v['staticlib_PATTERN']='lib%s.a' |
560 | - |
561 | -gcc.gcc_modifier_hpux = gcc_modifier_hpux |
562 | - |
563 | -from TaskGen import feature, after |
564 | -@feature('cprogram', 'cshlib') |
565 | -@after('apply_link', 'apply_lib_vars', 'apply_obj_vars') |
566 | -def hpux_addfullpath(self): |
567 | - if sys.platform == 'hp-ux11': |
568 | - link = getattr(self, 'link_task', None) |
569 | - if link: |
570 | - lst = link.env.LINKFLAGS |
571 | - buf = [] |
572 | - for x in lst: |
573 | - if x.startswith('-L'): |
574 | - p2 = x[2:] |
575 | - if not os.path.isabs(p2): |
576 | - x = x[:2] + self.bld.srcnode.abspath(link.env) + "/../" + x[2:].lstrip('.') |
577 | - buf.append(x) |
578 | - link.env.LINKFLAGS = buf |
579 | diff --git a/buildtools/wafsamba/irixcc.py b/buildtools/wafsamba/irixcc.py |
580 | deleted file mode 100644 |
581 | index f3cb451..0000000 |
582 | --- a/buildtools/wafsamba/irixcc.py |
583 | +++ /dev/null |
584 | @@ -1,79 +0,0 @@ |
585 | - |
586 | -# compiler definition for irix/MIPSpro cc compiler |
587 | -# based on suncc.py from waf |
588 | - |
589 | -import os, optparse |
590 | -import Utils, Options, Configure |
591 | -import ccroot, ar |
592 | -from Configure import conftest |
593 | - |
594 | -from compiler_cc import c_compiler |
595 | - |
596 | -c_compiler['irix'] = ['gcc', 'irixcc'] |
597 | - |
598 | -@conftest |
599 | -def find_irixcc(conf): |
600 | - v = conf.env |
601 | - cc = None |
602 | - if v['CC']: cc = v['CC'] |
603 | - elif 'CC' in conf.environ: cc = conf.environ['CC'] |
604 | - if not cc: cc = conf.find_program('cc', var='CC') |
605 | - if not cc: conf.fatal('irixcc was not found') |
606 | - cc = conf.cmd_to_list(cc) |
607 | - |
608 | - try: |
609 | - if Utils.cmd_output(cc + ['-c99'] + ['-version']) != '': |
610 | - conf.fatal('irixcc %r was not found' % cc) |
611 | - except ValueError: |
612 | - conf.fatal('irixcc -v could not be executed') |
613 | - |
614 | - conf.env.append_unique('CCFLAGS', '-c99') |
615 | - |
616 | - v['CC'] = cc |
617 | - v['CC_NAME'] = 'irix' |
618 | - |
619 | -@conftest |
620 | -def irixcc_common_flags(conf): |
621 | - v = conf.env |
622 | - |
623 | - v['CC_SRC_F'] = '' |
624 | - v['CC_TGT_F'] = ['-c', '-o', ''] |
625 | - v['CPPPATH_ST'] = '-I%s' # template for adding include paths |
626 | - |
627 | - # linker |
628 | - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] |
629 | - v['CCLNK_SRC_F'] = '' |
630 | - v['CCLNK_TGT_F'] = ['-o', ''] |
631 | - |
632 | - v['LIB_ST'] = '-l%s' # template for adding libs |
633 | - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths |
634 | - v['STATICLIB_ST'] = '-l%s' |
635 | - v['STATICLIBPATH_ST'] = '-L%s' |
636 | - v['CCDEFINES_ST'] = '-D%s' |
637 | - |
638 | -# v['SONAME_ST'] = '-Wl,-h -Wl,%s' |
639 | -# v['SHLIB_MARKER'] = '-Bdynamic' |
640 | -# v['STATICLIB_MARKER'] = '-Bstatic' |
641 | - |
642 | - # program |
643 | - v['program_PATTERN'] = '%s' |
644 | - |
645 | - # shared library |
646 | -# v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] |
647 | -# v['shlib_LINKFLAGS'] = ['-G'] |
648 | - v['shlib_PATTERN'] = 'lib%s.so' |
649 | - |
650 | - # static lib |
651 | -# v['staticlib_LINKFLAGS'] = ['-Bstatic'] |
652 | -# v['staticlib_PATTERN'] = 'lib%s.a' |
653 | - |
654 | -detect = ''' |
655 | -find_irixcc |
656 | -find_cpp |
657 | -find_ar |
658 | -irixcc_common_flags |
659 | -cc_load_tools |
660 | -cc_add_flags |
661 | -link_add_flags |
662 | -''' |
663 | - |
664 | diff --git a/buildtools/wafsamba/nothreads.py b/buildtools/wafsamba/nothreads.py |
665 | deleted file mode 100644 |
666 | index 9bd33e8..0000000 |
667 | --- a/buildtools/wafsamba/nothreads.py |
668 | +++ /dev/null |
669 | @@ -1,219 +0,0 @@ |
670 | -# encoding: utf-8 |
671 | -# Thomas Nagy, 2005-2008 (ita) |
672 | - |
673 | -# this replaces the core of Runner.py in waf with a varient that works |
674 | -# on systems with completely broken threading (such as Python 2.5.x on |
675 | -# AIX). For simplicity we enable this when JOBS=1, which is triggered |
676 | -# by the compatibility makefile used for the waf build. That also ensures |
677 | -# this code is tested, as it means it is used in the build farm, and by |
678 | -# anyone using 'make' to build Samba with waf |
679 | - |
680 | -"Execute the tasks" |
681 | - |
682 | -import sys, random, threading |
683 | -try: from Queue import Queue |
684 | -except ImportError: from queue import Queue |
685 | -import Utils, Options |
686 | -from Constants import EXCEPTION, CRASHED, MAXJOBS, ASK_LATER, SKIPPED, SKIP_ME, SUCCESS |
687 | - |
688 | -GAP = 15 |
689 | - |
690 | -run_old = threading.Thread.run |
691 | -def run(*args, **kwargs): |
692 | - try: |
693 | - run_old(*args, **kwargs) |
694 | - except (KeyboardInterrupt, SystemExit): |
695 | - raise |
696 | - except: |
697 | - sys.excepthook(*sys.exc_info()) |
698 | -threading.Thread.run = run |
699 | - |
700 | - |
701 | -class TaskConsumer(object): |
702 | - consumers = 1 |
703 | - |
704 | -def process(tsk): |
705 | - m = tsk.master |
706 | - if m.stop: |
707 | - m.out.put(tsk) |
708 | - return |
709 | - |
710 | - try: |
711 | - tsk.generator.bld.printout(tsk.display()) |
712 | - if tsk.__class__.stat: ret = tsk.__class__.stat(tsk) |
713 | - # actual call to task's run() function |
714 | - else: ret = tsk.call_run() |
715 | - except Exception as e: |
716 | - tsk.err_msg = Utils.ex_stack() |
717 | - tsk.hasrun = EXCEPTION |
718 | - |
719 | - # TODO cleanup |
720 | - m.error_handler(tsk) |
721 | - m.out.put(tsk) |
722 | - return |
723 | - |
724 | - if ret: |
725 | - tsk.err_code = ret |
726 | - tsk.hasrun = CRASHED |
727 | - else: |
728 | - try: |
729 | - tsk.post_run() |
730 | - except Utils.WafError: |
731 | - pass |
732 | - except Exception: |
733 | - tsk.err_msg = Utils.ex_stack() |
734 | - tsk.hasrun = EXCEPTION |
735 | - else: |
736 | - tsk.hasrun = SUCCESS |
737 | - if tsk.hasrun != SUCCESS: |
738 | - m.error_handler(tsk) |
739 | - |
740 | - m.out.put(tsk) |
741 | - |
742 | -class Parallel(object): |
743 | - """ |
744 | - keep the consumer threads busy, and avoid consuming cpu cycles |
745 | - when no more tasks can be added (end of the build, etc) |
746 | - """ |
747 | - def __init__(self, bld, j=2): |
748 | - |
749 | - # number of consumers |
750 | - self.numjobs = j |
751 | - |
752 | - self.manager = bld.task_manager |
753 | - self.manager.current_group = 0 |
754 | - |
755 | - self.total = self.manager.total() |
756 | - |
757 | - # tasks waiting to be processed - IMPORTANT |
758 | - self.outstanding = [] |
759 | - self.maxjobs = MAXJOBS |
760 | - |
761 | - # tasks that are awaiting for another task to complete |
762 | - self.frozen = [] |
763 | - |
764 | - # tasks returned by the consumers |
765 | - self.out = Queue(0) |
766 | - |
767 | - self.count = 0 # tasks not in the producer area |
768 | - |
769 | - self.processed = 1 # progress indicator |
770 | - |
771 | - self.stop = False # error condition to stop the build |
772 | - self.error = False # error flag |
773 | - |
774 | - def get_next(self): |
775 | - "override this method to schedule the tasks in a particular order" |
776 | - if not self.outstanding: |
777 | - return None |
778 | - return self.outstanding.pop(0) |
779 | - |
780 | - def postpone(self, tsk): |
781 | - "override this method to schedule the tasks in a particular order" |
782 | - # TODO consider using a deque instead |
783 | - if random.randint(0, 1): |
784 | - self.frozen.insert(0, tsk) |
785 | - else: |
786 | - self.frozen.append(tsk) |
787 | - |
788 | - def refill_task_list(self): |
789 | - "called to set the next group of tasks" |
790 | - |
791 | - while self.count > self.numjobs + GAP or self.count >= self.maxjobs: |
792 | - self.get_out() |
793 | - |
794 | - while not self.outstanding: |
795 | - if self.count: |
796 | - self.get_out() |
797 | - |
798 | - if self.frozen: |
799 | - self.outstanding += self.frozen |
800 | - self.frozen = [] |
801 | - elif not self.count: |
802 | - (jobs, tmp) = self.manager.get_next_set() |
803 | - if jobs is not None: |
804 | - self.maxjobs = jobs |
805 | - if tmp: |
806 | - self.outstanding += tmp |
807 | - break |
808 | - |
809 | - def get_out(self): |
810 | - "the tasks that are put to execute are all collected using get_out" |
811 | - ret = self.out.get() |
812 | - self.manager.add_finished(ret) |
813 | - if not self.stop and getattr(ret, 'more_tasks', None): |
814 | - self.outstanding += ret.more_tasks |
815 | - self.total += len(ret.more_tasks) |
816 | - self.count -= 1 |
817 | - |
818 | - def error_handler(self, tsk): |
819 | - "by default, errors make the build stop (not thread safe so be careful)" |
820 | - if not Options.options.keep: |
821 | - self.stop = True |
822 | - self.error = True |
823 | - |
824 | - def start(self): |
825 | - "execute the tasks" |
826 | - |
827 | - while not self.stop: |
828 | - |
829 | - self.refill_task_list() |
830 | - |
831 | - # consider the next task |
832 | - tsk = self.get_next() |
833 | - if not tsk: |
834 | - if self.count: |
835 | - # tasks may add new ones after they are run |
836 | - continue |
837 | - else: |
838 | - # no tasks to run, no tasks running, time to exit |
839 | - break |
840 | - |
841 | - if tsk.hasrun: |
842 | - # if the task is marked as "run", just skip it |
843 | - self.processed += 1 |
844 | - self.manager.add_finished(tsk) |
845 | - continue |
846 | - |
847 | - try: |
848 | - st = tsk.runnable_status() |
849 | - except Exception as e: |
850 | - self.processed += 1 |
851 | - if self.stop and not Options.options.keep: |
852 | - tsk.hasrun = SKIPPED |
853 | - self.manager.add_finished(tsk) |
854 | - continue |
855 | - self.error_handler(tsk) |
856 | - self.manager.add_finished(tsk) |
857 | - tsk.hasrun = EXCEPTION |
858 | - tsk.err_msg = Utils.ex_stack() |
859 | - continue |
860 | - |
861 | - if st == ASK_LATER: |
862 | - self.postpone(tsk) |
863 | - elif st == SKIP_ME: |
864 | - self.processed += 1 |
865 | - tsk.hasrun = SKIPPED |
866 | - self.manager.add_finished(tsk) |
867 | - else: |
868 | - # run me: put the task in ready queue |
869 | - tsk.position = (self.processed, self.total) |
870 | - self.count += 1 |
871 | - self.processed += 1 |
872 | - tsk.master = self |
873 | - |
874 | - process(tsk) |
875 | - |
876 | - # self.count represents the tasks that have been made available to the consumer threads |
877 | - # collect all the tasks after an error else the message may be incomplete |
878 | - while self.error and self.count: |
879 | - self.get_out() |
880 | - |
881 | - #print loop |
882 | - assert (self.count == 0 or self.stop) |
883 | - |
884 | - |
885 | -# enable nothreads |
886 | -import Runner |
887 | -Runner.process = process |
888 | -Runner.Parallel = Parallel |
889 | diff --git a/buildtools/wafsamba/pkgconfig.py b/buildtools/wafsamba/pkgconfig.py |
890 | index 999bad4..b83d5f3 100644 |
891 | --- a/buildtools/wafsamba/pkgconfig.py |
892 | +++ b/buildtools/wafsamba/pkgconfig.py |
893 | @@ -1,7 +1,7 @@ |
894 | # handle substitution of variables in pc files |
895 | |
896 | import os, re, sys |
897 | -import Build, Logs |
898 | +from waflib import Build, Logs |
899 | from samba_utils import SUBST_VARS_RECURSIVE, TO_LIST |
900 | |
901 | def subst_at_vars(task): |
902 | @@ -52,7 +52,7 @@ def PKG_CONFIG_FILES(bld, pc_files, vnum=None, extra_name=None): |
903 | rule=subst_at_vars, |
904 | source=f+'.in', |
905 | target=target) |
906 | - bld.add_manual_dependency(bld.path.find_or_declare(f), bld.env['PREFIX']) |
907 | + bld.add_manual_dependency(bld.path.find_or_declare(f), bld.env['PREFIX'].encode('utf8')) |
908 | t.vars = [] |
909 | if t.env.RPATH_ON_INSTALL: |
910 | t.env.LIB_RPATH = t.env.RPATH_ST % t.env.LIBDIR |
911 | diff --git a/buildtools/wafsamba/samba3.py b/buildtools/wafsamba/samba3.py |
912 | index 44daff9..5aab250 100644 |
913 | --- a/buildtools/wafsamba/samba3.py |
914 | +++ b/buildtools/wafsamba/samba3.py |
915 | @@ -1,12 +1,11 @@ |
916 | # a waf tool to add autoconf-like macros to the configure section |
917 | # and for SAMBA_ macros for building libraries, binaries etc |
918 | |
919 | -import Options, Build, os |
920 | -from samba_utils import os_path_relpath, TO_LIST, samba_add_onoff_option |
921 | +import os |
922 | +from waflib import Build |
923 | +from samba_utils import os_path_relpath, TO_LIST |
924 | from samba_autoconf import library_flags |
925 | |
926 | -Options.Handler.SAMBA3_ADD_OPTION = samba_add_onoff_option |
927 | - |
928 | def SAMBA3_IS_STATIC_MODULE(bld, module): |
929 | '''Check whether module is in static list''' |
930 | if module in bld.env['static_modules']: |
931 | @@ -32,7 +31,7 @@ def s3_fix_kwargs(bld, kwargs): |
932 | '''fix the build arguments for s3 build rules to include the |
933 | necessary includes, subdir and cflags options ''' |
934 | s3dir = os.path.join(bld.env.srcdir, 'source3') |
935 | - s3reldir = os_path_relpath(s3dir, bld.curdir) |
936 | + s3reldir = os_path_relpath(s3dir, bld.path.abspath()) |
937 | |
938 | # the extra_includes list is relative to the source3 directory |
939 | extra_includes = [ '.', 'include', 'lib' ] |
940 | diff --git a/buildtools/wafsamba/samba_abi.py b/buildtools/wafsamba/samba_abi.py |
941 | index 196b468..5e7686d 100644 |
942 | --- a/buildtools/wafsamba/samba_abi.py |
943 | +++ b/buildtools/wafsamba/samba_abi.py |
944 | @@ -1,7 +1,13 @@ |
945 | # functions for handling ABI checking of libraries |
946 | |
947 | -import Options, Utils, os, Logs, samba_utils, sys, Task, fnmatch, re, Build |
948 | -from TaskGen import feature, before, after |
949 | +import os |
950 | +import sys |
951 | +import re |
952 | +import fnmatch |
953 | + |
954 | +from waflib import Options, Utils, Logs, Task, Build, Errors |
955 | +from waflib.TaskGen import feature, before, after |
956 | +from wafsamba import samba_utils |
957 | |
958 | # these type maps cope with platform specific names for common types |
959 | # please add new type mappings into the list below |
960 | @@ -10,7 +16,7 @@ abi_type_maps = { |
961 | 'struct __va_list_tag *' : 'va_list' |
962 | } |
963 | |
964 | -version_key = lambda x: map(int, x.split(".")) |
965 | +version_key = lambda x: list(map(int, x.split("."))) |
966 | |
967 | def normalise_signature(sig): |
968 | '''normalise a signature from gdb''' |
969 | @@ -79,7 +85,7 @@ def abi_check_task(self): |
970 | libpath = self.inputs[0].abspath(self.env) |
971 | libname = os.path.basename(libpath) |
972 | |
973 | - sigs = Utils.cmd_output([abi_gen, libpath]) |
974 | + sigs = samba_utils.get_string(Utils.cmd_output([abi_gen, libpath])) |
975 | parsed_sigs = parse_sigs(sigs, self.ABI_MATCH) |
976 | |
977 | sig_file = self.ABI_FILE |
978 | @@ -87,7 +93,7 @@ def abi_check_task(self): |
979 | old_sigs = samba_utils.load_file(sig_file) |
980 | if old_sigs is None or Options.options.ABI_UPDATE: |
981 | if not save_sigs(sig_file, parsed_sigs): |
982 | - raise Utils.WafError('Failed to save ABI file "%s"' % sig_file) |
983 | + raise Errors.WafError('Failed to save ABI file "%s"' % sig_file) |
984 | Logs.warn('Generated ABI signatures %s' % sig_file) |
985 | return |
986 | |
987 | @@ -112,14 +118,14 @@ def abi_check_task(self): |
988 | got_error = True |
989 | |
990 | if got_error: |
991 | - raise Utils.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname) |
992 | + raise Errors.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname) |
993 | |
994 | |
995 | -t = Task.task_type_from_func('abi_check', abi_check_task, color='BLUE', ext_in='.bin') |
996 | +t = Task.task_factory('abi_check', abi_check_task, color='BLUE', ext_in='.bin') |
997 | t.quiet = True |
998 | # allow "waf --abi-check" to force re-checking the ABI |
999 | if '--abi-check' in sys.argv: |
1000 | - Task.always_run(t) |
1001 | + t.always_run = True |
1002 | |
1003 | @after('apply_link') |
1004 | @feature('abi_check') |
1005 | @@ -184,18 +190,20 @@ def abi_write_vscript(f, libname, current_version, versions, symmap, abi_match): |
1006 | f.write("}%s;\n\n" % last_key) |
1007 | last_key = " %s" % symver |
1008 | f.write("%s {\n" % current_version) |
1009 | - local_abi = filter(lambda x: x[0] == '!', abi_match) |
1010 | - global_abi = filter(lambda x: x[0] != '!', abi_match) |
1011 | + local_abi = list(filter(lambda x: x[0] == '!', abi_match)) |
1012 | + global_abi = list(filter(lambda x: x[0] != '!', abi_match)) |
1013 | f.write("\tglobal:\n") |
1014 | if len(global_abi) > 0: |
1015 | for x in global_abi: |
1016 | f.write("\t\t%s;\n" % x) |
1017 | else: |
1018 | f.write("\t\t*;\n") |
1019 | - if abi_match != ["*"]: |
1020 | - f.write("\tlocal:\n") |
1021 | - for x in local_abi: |
1022 | - f.write("\t\t%s;\n" % x[1:]) |
1023 | + # Always hide symbols that must be local if exist |
1024 | + local_abi.extend(["!_end", "!__bss_start", "!_edata"]) |
1025 | + f.write("\tlocal:\n") |
1026 | + for x in local_abi: |
1027 | + f.write("\t\t%s;\n" % x[1:]) |
1028 | + if global_abi != ["*"]: |
1029 | if len(global_abi) > 0: |
1030 | f.write("\t\t*;\n") |
1031 | f.write("};\n") |
1032 | diff --git a/buildtools/wafsamba/samba_autoconf.py b/buildtools/wafsamba/samba_autoconf.py |
1033 | index c4391d0..ee1fc23 100644 |
1034 | --- a/buildtools/wafsamba/samba_autoconf.py |
1035 | +++ b/buildtools/wafsamba/samba_autoconf.py |
1036 | @@ -1,9 +1,10 @@ |
1037 | # a waf tool to add autoconf-like macros to the configure section |
1038 | |
1039 | import os, sys |
1040 | -import Build, Options, preproc, Logs |
1041 | -from Configure import conf |
1042 | -from TaskGen import feature |
1043 | +from waflib import Build, Options, Logs, Context |
1044 | +from waflib.Configure import conf |
1045 | +from waflib.TaskGen import feature |
1046 | +from waflib.Tools import c_preproc as preproc |
1047 | from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p |
1048 | |
1049 | missing_headers = set() |
1050 | @@ -18,7 +19,7 @@ def DEFINE(conf, d, v, add_to_cflags=False, quote=False): |
1051 | '''define a config option''' |
1052 | conf.define(d, v, quote=quote) |
1053 | if add_to_cflags: |
1054 | - conf.env.append_value('CCDEFINES', d + '=' + str(v)) |
1055 | + conf.env.append_value('CFLAGS', '-D%s=%s' % (d, str(v))) |
1056 | |
1057 | def hlist_to_string(conf, headers=None): |
1058 | '''convert a headers list to a set of #include lines''' |
1059 | @@ -44,11 +45,11 @@ def COMPOUND_START(conf, msg): |
1060 | if v != [] and v != 0: |
1061 | conf.env.in_compound = v + 1 |
1062 | return |
1063 | - conf.check_message_1(msg) |
1064 | - conf.saved_check_message_1 = conf.check_message_1 |
1065 | - conf.check_message_1 = null_check_message_1 |
1066 | - conf.saved_check_message_2 = conf.check_message_2 |
1067 | - conf.check_message_2 = null_check_message_2 |
1068 | + conf.start_msg(msg) |
1069 | + conf.saved_check_message_1 = conf.start_msg |
1070 | + conf.start_msg = null_check_message_1 |
1071 | + conf.saved_check_message_2 = conf.end_msg |
1072 | + conf.end_msg = null_check_message_2 |
1073 | conf.env.in_compound = 1 |
1074 | |
1075 | |
1076 | @@ -58,9 +59,9 @@ def COMPOUND_END(conf, result): |
1077 | conf.env.in_compound -= 1 |
1078 | if conf.env.in_compound != 0: |
1079 | return |
1080 | - conf.check_message_1 = conf.saved_check_message_1 |
1081 | - conf.check_message_2 = conf.saved_check_message_2 |
1082 | - p = conf.check_message_2 |
1083 | + conf.start_msg = conf.saved_check_message_1 |
1084 | + conf.end_msg = conf.saved_check_message_2 |
1085 | + p = conf.end_msg |
1086 | if result is True: |
1087 | p('ok') |
1088 | elif not result: |
1089 | @@ -96,10 +97,10 @@ def CHECK_HEADER(conf, h, add_headers=False, lib=None): |
1090 | hdrs = hlist_to_string(conf, headers=h) |
1091 | if lib is None: |
1092 | lib = "" |
1093 | - ret = conf.check(fragment='%s\nint main(void) { return 0; }' % hdrs, |
1094 | + ret = conf.check(fragment='%s\nint main(void) { return 0; }\n' % hdrs, |
1095 | type='nolink', |
1096 | execute=0, |
1097 | - ccflags=ccflags, |
1098 | + cflags=ccflags, |
1099 | mandatory=False, |
1100 | includes=cpppath, |
1101 | uselib=lib.upper(), |
1102 | @@ -250,7 +251,10 @@ def CHECK_FUNC(conf, f, link=True, lib=None, headers=None): |
1103 | |
1104 | ret = False |
1105 | |
1106 | - conf.COMPOUND_START('Checking for %s' % f) |
1107 | + in_lib_str = "" |
1108 | + if lib: |
1109 | + in_lib_str = " in %s" % lib |
1110 | + conf.COMPOUND_START('Checking for %s%s' % (f, in_lib_str)) |
1111 | |
1112 | if link is None or link: |
1113 | ret = CHECK_CODE(conf, |
1114 | @@ -322,7 +326,7 @@ def CHECK_SIZEOF(conf, vars, headers=None, define=None, critical=True): |
1115 | ret = False |
1116 | if v_define is None: |
1117 | v_define = 'SIZEOF_%s' % v.upper().replace(' ', '_') |
1118 | - for size in list((1, 2, 4, 8, 16, 32)): |
1119 | + for size in list((1, 2, 4, 8, 16, 32, 64)): |
1120 | if CHECK_CODE(conf, |
1121 | 'static int test_array[1 - 2 * !(((long int)(sizeof(%s))) <= %d)];' % (v, size), |
1122 | define=v_define, |
1123 | @@ -383,12 +387,10 @@ def CHECK_CODE(conf, code, define, |
1124 | else: |
1125 | execute = 0 |
1126 | |
1127 | - defs = conf.get_config_header() |
1128 | - |
1129 | if addmain: |
1130 | - fragment='%s\n%s\n int main(void) { %s; return 0; }\n' % (defs, hdrs, code) |
1131 | + fragment='%s\n int main(void) { %s; return 0; }\n' % (hdrs, code) |
1132 | else: |
1133 | - fragment='%s\n%s\n%s\n' % (defs, hdrs, code) |
1134 | + fragment='%s\n%s\n' % (hdrs, code) |
1135 | |
1136 | if msg is None: |
1137 | msg="Checking for %s" % define |
1138 | @@ -398,15 +400,11 @@ def CHECK_CODE(conf, code, define, |
1139 | # Be strict when relying on a compiler check |
1140 | # Some compilers (e.g. xlc) ignore non-supported features as warnings |
1141 | if strict: |
1142 | - extra_cflags = None |
1143 | - if conf.env["CC_NAME"] == "gcc": |
1144 | - extra_cflags = "-Werror" |
1145 | - elif conf.env["CC_NAME"] == "xlc": |
1146 | - extra_cflags = "-qhalt=w" |
1147 | - cflags.append(extra_cflags) |
1148 | + if 'WERROR_CFLAGS' in conf.env: |
1149 | + cflags.extend(conf.env['WERROR_CFLAGS']) |
1150 | |
1151 | if local_include: |
1152 | - cflags.append('-I%s' % conf.curdir) |
1153 | + cflags.append('-I%s' % conf.path.abspath()) |
1154 | |
1155 | if not link: |
1156 | type='nolink' |
1157 | @@ -431,11 +429,11 @@ def CHECK_CODE(conf, code, define, |
1158 | |
1159 | conf.COMPOUND_START(msg) |
1160 | |
1161 | - ret = conf.check(fragment=fragment, |
1162 | + try: |
1163 | + ret = conf.check(fragment=fragment, |
1164 | execute=execute, |
1165 | define_name = define, |
1166 | - mandatory = mandatory, |
1167 | - ccflags=cflags, |
1168 | + cflags=cflags, |
1169 | ldflags=ldflags, |
1170 | includes=includes, |
1171 | uselib=uselib, |
1172 | @@ -444,22 +442,30 @@ def CHECK_CODE(conf, code, define, |
1173 | quote=quote, |
1174 | exec_args=exec_args, |
1175 | define_ret=define_ret) |
1176 | - if not ret and CONFIG_SET(conf, define): |
1177 | - # sometimes conf.check() returns false, but it |
1178 | - # sets the define. Maybe a waf bug? |
1179 | - ret = True |
1180 | - if ret: |
1181 | + except Exception: |
1182 | + if always: |
1183 | + conf.DEFINE(define, 0) |
1184 | + else: |
1185 | + conf.undefine(define) |
1186 | + conf.COMPOUND_END(False) |
1187 | + if mandatory: |
1188 | + raise |
1189 | + return False |
1190 | + else: |
1191 | + # Success is indicated by ret but we should unset |
1192 | + # defines set by WAF's c_config.check() because it |
1193 | + # defines it to int(ret) and we want to undefine it |
1194 | + if not ret: |
1195 | + conf.undefine(define) |
1196 | + conf.COMPOUND_END(False) |
1197 | + return False |
1198 | if not define_ret: |
1199 | conf.DEFINE(define, 1) |
1200 | conf.COMPOUND_END(True) |
1201 | else: |
1202 | - conf.COMPOUND_END(conf.env[define]) |
1203 | + conf.DEFINE(define, ret, quote=quote) |
1204 | + conf.COMPOUND_END(ret) |
1205 | return True |
1206 | - if always: |
1207 | - conf.DEFINE(define, 0) |
1208 | - conf.COMPOUND_END(False) |
1209 | - return False |
1210 | - |
1211 | |
1212 | |
1213 | @conf |
1214 | @@ -490,8 +496,9 @@ def CHECK_CFLAGS(conf, cflags, fragment='int main(void) { return 0; }\n'): |
1215 | check_cflags.extend(conf.env['WERROR_CFLAGS']) |
1216 | return conf.check(fragment=fragment, |
1217 | execute=0, |
1218 | + mandatory=False, |
1219 | type='nolink', |
1220 | - ccflags=check_cflags, |
1221 | + cflags=check_cflags, |
1222 | msg="Checking compiler accepts %s" % cflags) |
1223 | |
1224 | @conf |
1225 | @@ -547,12 +554,15 @@ def library_flags(self, libs): |
1226 | # note that we do not add the -I and -L in here, as that is added by the waf |
1227 | # core. Adding it here would just change the order that it is put on the link line |
1228 | # which can cause system paths to be added before internal libraries |
1229 | - extra_ccflags = TO_LIST(getattr(self.env, 'CCFLAGS_%s' % lib.upper(), [])) |
1230 | + extra_ccflags = TO_LIST(getattr(self.env, 'CFLAGS_%s' % lib.upper(), [])) |
1231 | extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), [])) |
1232 | extra_cpppath = TO_LIST(getattr(self.env, 'CPPPATH_%s' % lib.upper(), [])) |
1233 | ccflags.extend(extra_ccflags) |
1234 | ldflags.extend(extra_ldflags) |
1235 | cpppath.extend(extra_cpppath) |
1236 | + |
1237 | + extra_cpppath = TO_LIST(getattr(self.env, 'INCLUDES_%s' % lib.upper(), [])) |
1238 | + cpppath.extend(extra_cpppath) |
1239 | if 'EXTRA_LDFLAGS' in self.env: |
1240 | ldflags.extend(self.env['EXTRA_LDFLAGS']) |
1241 | |
1242 | @@ -585,9 +595,9 @@ int foo() |
1243 | |
1244 | (ccflags, ldflags, cpppath) = library_flags(conf, lib) |
1245 | if shlib: |
1246 | - res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False) |
1247 | + res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False) |
1248 | else: |
1249 | - res = conf.check(lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False) |
1250 | + res = conf.check(lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False) |
1251 | |
1252 | if not res: |
1253 | if mandatory: |
1254 | @@ -661,8 +671,8 @@ def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False, |
1255 | @conf |
1256 | def IN_LAUNCH_DIR(conf): |
1257 | '''return True if this rule is being run from the launch directory''' |
1258 | - return os.path.realpath(conf.curdir) == os.path.realpath(Options.launch_dir) |
1259 | -Options.Handler.IN_LAUNCH_DIR = IN_LAUNCH_DIR |
1260 | + return os.path.realpath(conf.path.abspath()) == os.path.realpath(Context.launch_dir) |
1261 | +Options.OptionsContext.IN_LAUNCH_DIR = IN_LAUNCH_DIR |
1262 | |
1263 | |
1264 | @conf |
1265 | @@ -674,23 +684,42 @@ def SAMBA_CONFIG_H(conf, path=None): |
1266 | return |
1267 | |
1268 | # we need to build real code that can't be optimized away to test |
1269 | - if conf.check(fragment=''' |
1270 | - #include <stdio.h> |
1271 | - |
1272 | - int main(void) |
1273 | - { |
1274 | - char t[100000]; |
1275 | - while (fgets(t, sizeof(t), stdin)); |
1276 | - return 0; |
1277 | - } |
1278 | - ''', |
1279 | - execute=0, |
1280 | - ccflags='-fstack-protector', |
1281 | - ldflags='-fstack-protector', |
1282 | - mandatory=False, |
1283 | - msg='Checking if toolchain accepts -fstack-protector'): |
1284 | - conf.ADD_CFLAGS('-fstack-protector') |
1285 | - conf.ADD_LDFLAGS('-fstack-protector') |
1286 | + stack_protect_list = ['-fstack-protector-strong', '-fstack-protector'] |
1287 | + for stack_protect_flag in stack_protect_list: |
1288 | + flag_supported = conf.check(fragment=''' |
1289 | + #include <stdio.h> |
1290 | + |
1291 | + int main(void) |
1292 | + { |
1293 | + char t[100000]; |
1294 | + while (fgets(t, sizeof(t), stdin)); |
1295 | + return 0; |
1296 | + } |
1297 | + ''', |
1298 | + execute=0, |
1299 | + cflags=[ '-Werror', '-Wp,-D_FORTIFY_SOURCE=2', stack_protect_flag], |
1300 | + mandatory=False, |
1301 | + msg='Checking if compiler accepts %s' % (stack_protect_flag)) |
1302 | + if flag_supported: |
1303 | + conf.ADD_CFLAGS('%s' % (stack_protect_flag)) |
1304 | + break |
1305 | + |
1306 | + flag_supported = conf.check(fragment=''' |
1307 | + #include <stdio.h> |
1308 | + |
1309 | + int main(void) |
1310 | + { |
1311 | + char t[100000]; |
1312 | + while (fgets(t, sizeof(t), stdin)); |
1313 | + return 0; |
1314 | + } |
1315 | + ''', |
1316 | + execute=0, |
1317 | + cflags=[ '-Werror', '-fstack-clash-protection'], |
1318 | + mandatory=False, |
1319 | + msg='Checking if compiler accepts -fstack-clash-protection') |
1320 | + if flag_supported: |
1321 | + conf.ADD_CFLAGS('-fstack-clash-protection') |
1322 | |
1323 | if Options.options.debug: |
1324 | conf.ADD_CFLAGS('-g', testflags=True) |
1325 | @@ -774,9 +803,12 @@ int main(void) { |
1326 | conf.env['EXTRA_LDFLAGS'].extend(conf.env['ADDITIONAL_LDFLAGS']) |
1327 | |
1328 | if path is None: |
1329 | - conf.write_config_header('config.h', top=True) |
1330 | + conf.write_config_header('default/config.h', top=True, remove=False) |
1331 | else: |
1332 | - conf.write_config_header(path) |
1333 | + conf.write_config_header(os.path.join(conf.variant, path), remove=False) |
1334 | + for key in conf.env.define_key: |
1335 | + conf.undefine(key, from_env=False) |
1336 | + conf.env.define_key = [] |
1337 | conf.SAMBA_CROSS_CHECK_COMPLETE() |
1338 | |
1339 | |
1340 | @@ -863,9 +895,6 @@ def CHECK_CC_ENV(conf): |
1341 | The build farm sometimes puts a space at the start""" |
1342 | if os.environ.get('CC'): |
1343 | conf.env.CC = TO_LIST(os.environ.get('CC')) |
1344 | - if len(conf.env.CC) == 1: |
1345 | - # make for nicer logs if just a single command |
1346 | - conf.env.CC = conf.env.CC[0] |
1347 | |
1348 | |
1349 | @conf |
1350 | @@ -875,7 +904,7 @@ def SETUP_CONFIGURE_CACHE(conf, enable): |
1351 | # when -C is chosen, we will use a private cache and will |
1352 | # not look into system includes. This roughtly matches what |
1353 | # autoconf does with -C |
1354 | - cache_path = os.path.join(conf.blddir, '.confcache') |
1355 | + cache_path = os.path.join(conf.bldnode.abspath(), '.confcache') |
1356 | mkdir_p(cache_path) |
1357 | Options.cache_global = os.environ['WAFCACHE'] = cache_path |
1358 | else: |
1359 | @@ -899,6 +928,3 @@ def SAMBA_CHECK_UNDEFINED_SYMBOL_FLAGS(conf): |
1360 | if conf.CHECK_LDFLAGS(['-undefined', 'dynamic_lookup']): |
1361 | conf.env.undefined_ignore_ldflags = ['-undefined', 'dynamic_lookup'] |
1362 | |
1363 | -@conf |
1364 | -def CHECK_CFG(self, *k, **kw): |
1365 | - return self.check_cfg(*k, **kw) |
1366 | diff --git a/buildtools/wafsamba/samba_autoproto.py b/buildtools/wafsamba/samba_autoproto.py |
1367 | index b2b5233..ace434f 100644 |
1368 | --- a/buildtools/wafsamba/samba_autoproto.py |
1369 | +++ b/buildtools/wafsamba/samba_autoproto.py |
1370 | @@ -1,13 +1,13 @@ |
1371 | # waf build tool for building automatic prototypes from C source |
1372 | |
1373 | import os |
1374 | -import Build |
1375 | +from waflib import Build |
1376 | from samba_utils import SET_TARGET_TYPE, os_path_relpath |
1377 | |
1378 | def SAMBA_AUTOPROTO(bld, header, source): |
1379 | '''rule for samba prototype generation''' |
1380 | bld.SET_BUILD_GROUP('prototypes') |
1381 | - relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath()) |
1382 | + relpath = os_path_relpath(bld.path.abspath(), bld.srcnode.abspath()) |
1383 | name = os.path.join(relpath, header) |
1384 | SET_TARGET_TYPE(bld, name, 'PROTOTYPE') |
1385 | t = bld( |
1386 | @@ -16,7 +16,7 @@ def SAMBA_AUTOPROTO(bld, header, source): |
1387 | target = header, |
1388 | update_outputs=True, |
1389 | ext_out='.c', |
1390 | - before ='cc', |
1391 | + before ='c', |
1392 | rule = '${PERL} "${SCRIPT}/mkproto.pl" --srcdir=.. --builddir=. --public=/dev/null --private="${TGT}" ${SRC}' |
1393 | ) |
1394 | t.env.SCRIPT = os.path.join(bld.srcnode.abspath(), 'source4/script') |
1395 | diff --git a/buildtools/wafsamba/samba_bundled.py b/buildtools/wafsamba/samba_bundled.py |
1396 | index 253d604..60ce7da 100644 |
1397 | --- a/buildtools/wafsamba/samba_bundled.py |
1398 | +++ b/buildtools/wafsamba/samba_bundled.py |
1399 | @@ -1,9 +1,9 @@ |
1400 | # functions to support bundled libraries |
1401 | |
1402 | import sys |
1403 | -import Build, Options, Logs |
1404 | -from Configure import conf |
1405 | -from samba_utils import TO_LIST |
1406 | +from waflib import Build, Options, Logs |
1407 | +from waflib.Configure import conf |
1408 | +from wafsamba import samba_utils |
1409 | |
1410 | def PRIVATE_NAME(bld, name, private_extension, private_library): |
1411 | '''possibly rename a library to include a bundled extension''' |
1412 | @@ -51,19 +51,19 @@ Build.BuildContext.BUILTIN_LIBRARY = BUILTIN_LIBRARY |
1413 | |
1414 | def BUILTIN_DEFAULT(opt, builtins): |
1415 | '''set a comma separated default list of builtin libraries for this package''' |
1416 | - if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options: |
1417 | + if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options.__dict__: |
1418 | return |
1419 | - Options.options['BUILTIN_LIBRARIES_DEFAULT'] = builtins |
1420 | -Options.Handler.BUILTIN_DEFAULT = BUILTIN_DEFAULT |
1421 | + Options.options.__dict__['BUILTIN_LIBRARIES_DEFAULT'] = builtins |
1422 | +Options.OptionsContext.BUILTIN_DEFAULT = BUILTIN_DEFAULT |
1423 | |
1424 | |
1425 | def PRIVATE_EXTENSION_DEFAULT(opt, extension, noextension=''): |
1426 | '''set a default private library extension''' |
1427 | - if 'PRIVATE_EXTENSION_DEFAULT' in Options.options: |
1428 | + if 'PRIVATE_EXTENSION_DEFAULT' in Options.options.__dict__: |
1429 | return |
1430 | - Options.options['PRIVATE_EXTENSION_DEFAULT'] = extension |
1431 | - Options.options['PRIVATE_EXTENSION_EXCEPTION'] = noextension |
1432 | -Options.Handler.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT |
1433 | + Options.options.__dict__['PRIVATE_EXTENSION_DEFAULT'] = extension |
1434 | + Options.options.__dict__['PRIVATE_EXTENSION_EXCEPTION'] = noextension |
1435 | +Options.OptionsContext.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT |
1436 | |
1437 | |
1438 | def minimum_library_version(conf, libname, default): |
1439 | @@ -139,7 +139,7 @@ def CHECK_BUNDLED_SYSTEM(conf, libname, minversion='0.0.0', |
1440 | # We always do a logic validation of 'onlyif' first |
1441 | missing = [] |
1442 | if onlyif: |
1443 | - for l in TO_LIST(onlyif): |
1444 | + for l in samba_utils.TO_LIST(onlyif): |
1445 | f = 'FOUND_SYSTEMLIB_%s' % l |
1446 | if not f in conf.env: |
1447 | Logs.error('ERROR: CHECK_BUNDLED_SYSTEM(%s) - ' % (libname) + |
1448 | diff --git a/buildtools/wafsamba/samba_conftests.py b/buildtools/wafsamba/samba_conftests.py |
1449 | index b52727b..ef632ba 100644 |
1450 | --- a/buildtools/wafsamba/samba_conftests.py |
1451 | +++ b/buildtools/wafsamba/samba_conftests.py |
1452 | @@ -2,34 +2,35 @@ |
1453 | # to test for commonly needed configuration options |
1454 | |
1455 | import os, shutil, re |
1456 | -import Build, Configure, Utils, Options, Logs |
1457 | -from Configure import conf |
1458 | -from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH |
1459 | +from waflib import Build, Configure, Utils, Options, Logs, Errors |
1460 | +from waflib.Configure import conf |
1461 | +from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH, get_string |
1462 | |
1463 | |
1464 | def add_option(self, *k, **kw): |
1465 | '''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests''' |
1466 | + Options.OptionsContext.parser = self |
1467 | match = kw.get('match', []) |
1468 | if match: |
1469 | del kw['match'] |
1470 | opt = self.parser.add_option(*k, **kw) |
1471 | opt.match = match |
1472 | return opt |
1473 | -Options.Handler.add_option = add_option |
1474 | +Options.OptionsContext.add_option = add_option |
1475 | |
1476 | @conf |
1477 | def check(self, *k, **kw): |
1478 | '''Override the waf defaults to inject --with-directory options''' |
1479 | |
1480 | if not 'env' in kw: |
1481 | - kw['env'] = self.env.copy() |
1482 | + kw['env'] = self.env.derive() |
1483 | |
1484 | # match the configuration test with specific options, for example: |
1485 | # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv" |
1486 | additional_dirs = [] |
1487 | if 'msg' in kw: |
1488 | msg = kw['msg'] |
1489 | - for x in Options.Handler.parser.parser.option_list: |
1490 | + for x in Options.OptionsContext.parser.parser.option_list: |
1491 | if getattr(x, 'match', None) and msg in x.match: |
1492 | d = getattr(Options.options, x.dest, '') |
1493 | if d: |
1494 | @@ -46,12 +47,12 @@ def check(self, *k, **kw): |
1495 | add_options_dir(additional_dirs, kw['env']) |
1496 | |
1497 | self.validate_c(kw) |
1498 | - self.check_message_1(kw['msg']) |
1499 | + self.start_msg(kw['msg']) |
1500 | ret = None |
1501 | try: |
1502 | ret = self.run_c_code(*k, **kw) |
1503 | except Configure.ConfigurationError as e: |
1504 | - self.check_message_2(kw['errmsg'], 'YELLOW') |
1505 | + self.end_msg(kw['errmsg'], 'YELLOW') |
1506 | if 'mandatory' in kw and kw['mandatory']: |
1507 | if Logs.verbose > 1: |
1508 | raise |
1509 | @@ -59,7 +60,7 @@ def check(self, *k, **kw): |
1510 | self.fatal('the configuration failed (see %r)' % self.log.name) |
1511 | else: |
1512 | kw['success'] = ret |
1513 | - self.check_message_2(self.ret_msg(kw['okmsg'], kw)) |
1514 | + self.end_msg(self.ret_msg(kw['okmsg'], kw)) |
1515 | |
1516 | # success! keep the CPPPATH/LIBPATH |
1517 | add_options_dir(additional_dirs, self.env) |
1518 | @@ -85,7 +86,7 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'): |
1519 | '''see what we need for largefile support''' |
1520 | getconf_cflags = conf.CHECK_COMMAND(['getconf', 'LFS_CFLAGS']); |
1521 | if getconf_cflags is not False: |
1522 | - if (conf.CHECK_CODE('return !(sizeof(off_t) >= 8)', |
1523 | + if (conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1', |
1524 | define='WORKING_GETCONF_LFS_CFLAGS', |
1525 | execute=True, |
1526 | cflags=getconf_cflags, |
1527 | @@ -100,13 +101,13 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'): |
1528 | else: |
1529 | conf.DEFINE(flag_split[0], flag_split[1]) |
1530 | |
1531 | - if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)', |
1532 | + if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1', |
1533 | define, |
1534 | execute=True, |
1535 | msg='Checking for large file support without additional flags'): |
1536 | return True |
1537 | |
1538 | - if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)', |
1539 | + if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1', |
1540 | define, |
1541 | execute=True, |
1542 | cflags='-D_FILE_OFFSET_BITS=64', |
1543 | @@ -114,7 +115,7 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'): |
1544 | conf.DEFINE('_FILE_OFFSET_BITS', 64) |
1545 | return True |
1546 | |
1547 | - if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)', |
1548 | + if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1', |
1549 | define, |
1550 | execute=True, |
1551 | cflags='-D_LARGE_FILES', |
1552 | @@ -162,7 +163,7 @@ def find_config_dir(conf): |
1553 | '''find a directory to run tests in''' |
1554 | k = 0 |
1555 | while k < 10000: |
1556 | - dir = os.path.join(conf.blddir, '.conf_check_%d' % k) |
1557 | + dir = os.path.join(conf.bldnode.abspath(), '.conf_check_%d' % k) |
1558 | try: |
1559 | shutil.rmtree(dir) |
1560 | except OSError: |
1561 | @@ -257,7 +258,8 @@ int foo(int v) { |
1562 | environ[0] = 1; |
1563 | ldb_module = PyImport_ImportModule("ldb"); |
1564 | return v * 2; |
1565 | -}''' |
1566 | +} |
1567 | +''' |
1568 | return conf.check(features='c cshlib',uselib='PYEMBED',fragment=snip,msg=msg, mandatory=False) |
1569 | |
1570 | # this one is quite complex, and should probably be broken up |
1571 | @@ -337,7 +339,8 @@ def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None): |
1572 | |
1573 | # we need to run the program, try to get its result |
1574 | args = conf.SAMBA_CROSS_ARGS(msg=msg) |
1575 | - proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) |
1576 | + proc = Utils.subprocess.Popen([lastprog] + args, |
1577 | + stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE) |
1578 | (out, err) = proc.communicate() |
1579 | w = conf.log.write |
1580 | w(str(out)) |
1581 | @@ -364,7 +367,7 @@ def CHECK_PERL_MANPAGE(conf, msg=None, section=None): |
1582 | else: |
1583 | msg = "perl manpage generation" |
1584 | |
1585 | - conf.check_message_1(msg) |
1586 | + conf.start_msg(msg) |
1587 | |
1588 | dir = find_config_dir(conf) |
1589 | |
1590 | @@ -381,28 +384,28 @@ WriteMakefile( |
1591 | """) |
1592 | back = os.path.abspath('.') |
1593 | os.chdir(bdir) |
1594 | - proc = Utils.pproc.Popen(['perl', 'Makefile.PL'], |
1595 | - stdout=Utils.pproc.PIPE, |
1596 | - stderr=Utils.pproc.PIPE) |
1597 | + proc = Utils.subprocess.Popen(['perl', 'Makefile.PL'], |
1598 | + stdout=Utils.subprocess.PIPE, |
1599 | + stderr=Utils.subprocess.PIPE) |
1600 | (out, err) = proc.communicate() |
1601 | os.chdir(back) |
1602 | |
1603 | ret = (proc.returncode == 0) |
1604 | if not ret: |
1605 | - conf.check_message_2('not found', color='YELLOW') |
1606 | + conf.end_msg('not found', color='YELLOW') |
1607 | return |
1608 | |
1609 | if section: |
1610 | man = Utils.readf(os.path.join(bdir,'Makefile')) |
1611 | m = re.search('MAN%sEXT\s+=\s+(\w+)' % section, man) |
1612 | if not m: |
1613 | - conf.check_message_2('not found', color='YELLOW') |
1614 | + conf.end_msg('not found', color='YELLOW') |
1615 | return |
1616 | ext = m.group(1) |
1617 | - conf.check_message_2(ext) |
1618 | + conf.end_msg(ext) |
1619 | return ext |
1620 | |
1621 | - conf.check_message_2('ok') |
1622 | + conf.end_msg('ok') |
1623 | return True |
1624 | |
1625 | |
1626 | @@ -416,7 +419,7 @@ def CHECK_COMMAND(conf, cmd, msg=None, define=None, on_target=True, boolean=Fals |
1627 | if on_target: |
1628 | cmd.extend(conf.SAMBA_CROSS_ARGS(msg=msg)) |
1629 | try: |
1630 | - ret = Utils.cmd_output(cmd) |
1631 | + ret = get_string(Utils.cmd_output(cmd)) |
1632 | except: |
1633 | conf.COMPOUND_END(False) |
1634 | return False |
1635 | @@ -461,7 +464,7 @@ def CHECK_INLINE(conf): |
1636 | ret = conf.CHECK_CODE(''' |
1637 | typedef int foo_t; |
1638 | static %s foo_t static_foo () {return 0; } |
1639 | - %s foo_t foo () {return 0; }''' % (i, i), |
1640 | + %s foo_t foo () {return 0; }\n''' % (i, i), |
1641 | define='INLINE_MACRO', |
1642 | addmain=False, |
1643 | link=False) |
1644 | @@ -486,13 +489,13 @@ def CHECK_XSLTPROC_MANPAGES(conf): |
1645 | return False |
1646 | |
1647 | s='http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl' |
1648 | - conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.XSLTPROC, s), |
1649 | + conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.get_flat('XSLTPROC'), s), |
1650 | msg='Checking for stylesheet %s' % s, |
1651 | define='XSLTPROC_MANPAGES', on_target=False, |
1652 | boolean=True) |
1653 | if not conf.CONFIG_SET('XSLTPROC_MANPAGES'): |
1654 | - print "A local copy of the docbook.xsl wasn't found on your system" \ |
1655 | - " consider installing package like docbook-xsl" |
1656 | + print("A local copy of the docbook.xsl wasn't found on your system" \ |
1657 | + " consider installing package like docbook-xsl") |
1658 | |
1659 | # |
1660 | # Determine the standard libpath for the used compiler, |
1661 | @@ -506,12 +509,12 @@ def CHECK_STANDARD_LIBPATH(conf): |
1662 | # at least gcc and clang support this: |
1663 | try: |
1664 | cmd = conf.env.CC + ['-print-search-dirs'] |
1665 | - out = Utils.cmd_output(cmd).split('\n') |
1666 | + out = get_string(Utils.cmd_output(cmd)).split('\n') |
1667 | except ValueError: |
1668 | # option not supported by compiler - use a standard list of directories |
1669 | dirlist = [ '/usr/lib', '/usr/lib64' ] |
1670 | except: |
1671 | - raise Utils.WafError('Unexpected error running "%s"' % (cmd)) |
1672 | + raise Errors.WafError('Unexpected error running "%s"' % (cmd)) |
1673 | else: |
1674 | dirlist = [] |
1675 | for line in out: |
1676 | diff --git a/buildtools/wafsamba/samba_cross.py b/buildtools/wafsamba/samba_cross.py |
1677 | index b8f2000..8863c2c 100644 |
1678 | --- a/buildtools/wafsamba/samba_cross.py |
1679 | +++ b/buildtools/wafsamba/samba_cross.py |
1680 | @@ -1,8 +1,9 @@ |
1681 | # functions for handling cross-compilation |
1682 | |
1683 | import os, sys, re, shlex |
1684 | -import Utils, Logs, Options |
1685 | -from Configure import conf |
1686 | +from waflib import Utils, Logs, Options, Errors, Context |
1687 | +from waflib.Configure import conf |
1688 | +from wafsamba import samba_utils |
1689 | |
1690 | real_Popen = None |
1691 | |
1692 | @@ -81,12 +82,12 @@ def cross_answer(ca_file, msg): |
1693 | f.close() |
1694 | return (int(m.group(1)), m.group(2)) |
1695 | else: |
1696 | - raise Utils.WafError("Bad answer format '%s' in %s" % (line, ca_file)) |
1697 | + raise Errors.WafError("Bad answer format '%s' in %s" % (line, ca_file)) |
1698 | f.close() |
1699 | return ANSWER_UNKNOWN |
1700 | |
1701 | |
1702 | -class cross_Popen(Utils.pproc.Popen): |
1703 | +class cross_Popen(Utils.subprocess.Popen): |
1704 | '''cross-compilation wrapper for Popen''' |
1705 | def __init__(*k, **kw): |
1706 | (obj, args) = k |
1707 | @@ -118,10 +119,10 @@ class cross_Popen(Utils.pproc.Popen): |
1708 | newargs.extend(args[0:i]) |
1709 | if use_answers: |
1710 | p = real_Popen(newargs, |
1711 | - stdout=Utils.pproc.PIPE, |
1712 | - stderr=Utils.pproc.PIPE) |
1713 | + stdout=Utils.subprocess.PIPE, |
1714 | + stderr=Utils.subprocess.PIPE) |
1715 | ce_out, ce_err = p.communicate() |
1716 | - ans = (p.returncode, ce_out) |
1717 | + ans = (p.returncode, samba_utils.get_string(ce_out)) |
1718 | add_answer(ca_file, msg, ans) |
1719 | else: |
1720 | args = newargs |
1721 | @@ -144,8 +145,8 @@ def SAMBA_CROSS_ARGS(conf, msg=None): |
1722 | |
1723 | global real_Popen |
1724 | if real_Popen is None: |
1725 | - real_Popen = Utils.pproc.Popen |
1726 | - Utils.pproc.Popen = cross_Popen |
1727 | + real_Popen = Utils.subprocess.Popen |
1728 | + Utils.subprocess.Popen = cross_Popen |
1729 | |
1730 | ret = [] |
1731 | |
1732 | @@ -154,11 +155,11 @@ def SAMBA_CROSS_ARGS(conf, msg=None): |
1733 | |
1734 | if conf.env.CROSS_ANSWERS: |
1735 | if msg is None: |
1736 | - raise Utils.WafError("Cannot have NULL msg in cross-answers") |
1737 | - ret.extend(['--cross-answers', os.path.join(Options.launch_dir, conf.env.CROSS_ANSWERS), msg]) |
1738 | + raise Errors.WafError("Cannot have NULL msg in cross-answers") |
1739 | + ret.extend(['--cross-answers', os.path.join(Context.launch_dir, conf.env.CROSS_ANSWERS), msg]) |
1740 | |
1741 | if ret == []: |
1742 | - raise Utils.WafError("Cannot cross-compile without either --cross-execute or --cross-answers") |
1743 | + raise Errors.WafError("Cannot cross-compile without either --cross-execute or --cross-answers") |
1744 | |
1745 | return ret |
1746 | |
1747 | @@ -167,5 +168,5 @@ def SAMBA_CROSS_CHECK_COMPLETE(conf): |
1748 | '''check if we have some unanswered questions''' |
1749 | global cross_answers_incomplete |
1750 | if conf.env.CROSS_COMPILE and cross_answers_incomplete: |
1751 | - raise Utils.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS) |
1752 | + raise Errors.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS) |
1753 | return True |
1754 | diff --git a/buildtools/wafsamba/samba_deps.py b/buildtools/wafsamba/samba_deps.py |
1755 | index 978a5e9..f8c3880 100644 |
1756 | --- a/buildtools/wafsamba/samba_deps.py |
1757 | +++ b/buildtools/wafsamba/samba_deps.py |
1758 | @@ -2,9 +2,10 @@ |
1759 | |
1760 | import os, sys, re, time |
1761 | |
1762 | -import Build, Environment, Options, Logs, Utils |
1763 | -from Logs import debug |
1764 | -from Configure import conf |
1765 | +from waflib import Build, Options, Logs, Utils, Errors |
1766 | +from waflib.Logs import debug |
1767 | +from waflib.Configure import conf |
1768 | +from waflib import ConfigSet |
1769 | |
1770 | from samba_bundled import BUILTIN_LIBRARY |
1771 | from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list, os_path_relpath |
1772 | @@ -85,7 +86,7 @@ def build_dependencies(self): |
1773 | # extra link flags from pkg_config |
1774 | libs = self.final_syslibs.copy() |
1775 | |
1776 | - (ccflags, ldflags, cpppath) = library_flags(self, list(libs)) |
1777 | + (cflags, ldflags, cpppath) = library_flags(self, list(libs)) |
1778 | new_ldflags = getattr(self, 'samba_ldflags', [])[:] |
1779 | new_ldflags.extend(ldflags) |
1780 | self.ldflags = new_ldflags |
1781 | @@ -102,7 +103,7 @@ def build_dependencies(self): |
1782 | self.sname, self.uselib, self.uselib_local, self.add_objects) |
1783 | |
1784 | if self.samba_type in ['SUBSYSTEM']: |
1785 | - # this is needed for the ccflags of libs that come from pkg_config |
1786 | + # this is needed for the cflags of libs that come from pkg_config |
1787 | self.uselib = list(self.final_syslibs) |
1788 | self.uselib.extend(list(self.direct_syslibs)) |
1789 | for lib in self.final_libs: |
1790 | @@ -235,7 +236,7 @@ def add_init_functions(self): |
1791 | if sentinel == 'NULL': |
1792 | proto = "extern void __%s_dummy_module_proto(void)" % (sname) |
1793 | cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto)) |
1794 | - self.ccflags = cflags |
1795 | + self.cflags = cflags |
1796 | return |
1797 | |
1798 | for m in modules: |
1799 | @@ -257,7 +258,7 @@ def add_init_functions(self): |
1800 | proto += '_MODULE_PROTO(%s)' % f |
1801 | proto += "extern void __%s_dummy_module_proto(void)" % (m) |
1802 | cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto)) |
1803 | - self.ccflags = cflags |
1804 | + self.cflags = cflags |
1805 | |
1806 | |
1807 | def check_duplicate_sources(bld, tgt_list): |
1808 | @@ -271,6 +272,9 @@ def check_duplicate_sources(bld, tgt_list): |
1809 | tpath = os.path.normpath(os_path_relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default')) |
1810 | obj_sources = set() |
1811 | for s in source_list: |
1812 | + if not isinstance(s, str): |
1813 | + print('strange path in check_duplicate_sources %r' % s) |
1814 | + s = s.abspath() |
1815 | p = os.path.normpath(os.path.join(tpath, s)) |
1816 | if p in obj_sources: |
1817 | Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname)) |
1818 | @@ -299,7 +303,7 @@ def check_duplicate_sources(bld, tgt_list): |
1819 | Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys())) |
1820 | for tname in subsystems[s]: |
1821 | if len(subsystems[s][tname]) > 1: |
1822 | - raise Utils.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname])) |
1823 | + raise Errors.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname])) |
1824 | |
1825 | return True |
1826 | |
1827 | @@ -372,7 +376,7 @@ def add_samba_attributes(bld, tgt_list): |
1828 | t.samba_abspath = t.path.abspath(bld.env) |
1829 | t.samba_deps_extended = t.samba_deps[:] |
1830 | t.samba_includes_extended = TO_LIST(t.samba_includes)[:] |
1831 | - t.ccflags = getattr(t, 'samba_cflags', '') |
1832 | + t.cflags = getattr(t, 'samba_cflags', '') |
1833 | |
1834 | def replace_grouping_libraries(bld, tgt_list): |
1835 | '''replace dependencies based on grouping libraries |
1836 | @@ -715,6 +719,11 @@ def reduce_objects(bld, tgt_list): |
1837 | if t.sname in rely_on: |
1838 | dup = dup.difference(rely_on[t.sname]) |
1839 | if dup: |
1840 | + # Do not remove duplicates of BUILTINS |
1841 | + d = next(iter(dup)) |
1842 | + if BUILTIN_LIBRARY(bld, d): |
1843 | + continue |
1844 | + |
1845 | debug('deps: removing dups from %s of type %s: %s also in %s %s', |
1846 | t.sname, t.samba_type, dup, t2.samba_type, l) |
1847 | new = new.difference(dup) |
1848 | @@ -951,7 +960,7 @@ savedeps_inputs = ['samba_deps', 'samba_includes', 'local_include', 'local_incl |
1849 | 'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols', |
1850 | 'use_global_deps', 'global_include' ] |
1851 | savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes', |
1852 | - 'ccflags', 'ldflags', 'samba_deps_extended', 'final_libs'] |
1853 | + 'cflags', 'ldflags', 'samba_deps_extended', 'final_libs'] |
1854 | savedeps_outenv = ['INC_PATHS'] |
1855 | savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ] |
1856 | savedeps_caches = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS'] |
1857 | @@ -960,7 +969,7 @@ savedeps_files = ['buildtools/wafsamba/samba_deps.py'] |
1858 | def save_samba_deps(bld, tgt_list): |
1859 | '''save the dependency calculations between builds, to make |
1860 | further builds faster''' |
1861 | - denv = Environment.Environment() |
1862 | + denv = ConfigSet.ConfigSet() |
1863 | |
1864 | denv.version = savedeps_version |
1865 | denv.savedeps_inputs = savedeps_inputs |
1866 | @@ -1007,15 +1016,15 @@ def save_samba_deps(bld, tgt_list): |
1867 | if tdeps != {}: |
1868 | denv.outenv[t.sname] = tdeps |
1869 | |
1870 | - depsfile = os.path.join(bld.bdir, "sambadeps") |
1871 | + depsfile = os.path.join(bld.cache_dir, "sambadeps") |
1872 | denv.store_fast(depsfile) |
1873 | |
1874 | |
1875 | |
1876 | def load_samba_deps(bld, tgt_list): |
1877 | '''load a previous set of build dependencies if possible''' |
1878 | - depsfile = os.path.join(bld.bdir, "sambadeps") |
1879 | - denv = Environment.Environment() |
1880 | + depsfile = os.path.join(bld.cache_dir, "sambadeps") |
1881 | + denv = ConfigSet.ConfigSet() |
1882 | try: |
1883 | debug('deps: checking saved dependencies') |
1884 | denv.load_fast(depsfile) |
1885 | diff --git a/buildtools/wafsamba/samba_dist.py b/buildtools/wafsamba/samba_dist.py |
1886 | index 8d51632..6af7bb4 100644 |
1887 | --- a/buildtools/wafsamba/samba_dist.py |
1888 | +++ b/buildtools/wafsamba/samba_dist.py |
1889 | @@ -2,13 +2,41 @@ |
1890 | # uses git ls-files to get file lists |
1891 | |
1892 | import os, sys, tarfile |
1893 | -import Utils, Scripting, Logs, Options |
1894 | -from Configure import conf |
1895 | -from samba_utils import os_path_relpath |
1896 | +from waflib import Utils, Scripting, Logs, Options |
1897 | +from waflib.Configure import conf |
1898 | +from samba_utils import os_path_relpath, get_string |
1899 | +from waflib import Context |
1900 | |
1901 | dist_dirs = None |
1902 | dist_files = None |
1903 | dist_blacklist = "" |
1904 | +dist_archive = None |
1905 | + |
1906 | +class Dist(Context.Context): |
1907 | + # TODO remove |
1908 | + cmd = 'dist' |
1909 | + fun = 'dist' |
1910 | + def execute(self): |
1911 | + Context.g_module.dist() |
1912 | + |
1913 | +class DistCheck(Scripting.DistCheck): |
1914 | + fun = 'distcheck' |
1915 | + cmd = 'distcheck' |
1916 | + def execute(self): |
1917 | + Options.options.distcheck_args = '' |
1918 | + if Context.g_module.distcheck is Scripting.distcheck: |
1919 | + # default |
1920 | + Context.g_module.distcheck(self) |
1921 | + else: |
1922 | + Context.g_module.distcheck() |
1923 | + Context.g_module.dist() |
1924 | + self.check() |
1925 | + def get_arch_name(self): |
1926 | + global dist_archive |
1927 | + return dist_archive |
1928 | + def make_distcheck_cmd(self, tmpdir): |
1929 | + waf = os.path.abspath(sys.argv[0]) |
1930 | + return [sys.executable, waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] |
1931 | |
1932 | def add_symlink(tar, fname, abspath, basedir): |
1933 | '''handle symlinks to directories that may move during packaging''' |
1934 | @@ -69,7 +97,7 @@ def add_tarfile(tar, fname, abspath, basedir): |
1935 | tinfo.gid = 0 |
1936 | tinfo.uname = 'root' |
1937 | tinfo.gname = 'root' |
1938 | - fh = open(abspath) |
1939 | + fh = open(abspath, "rb") |
1940 | tar.addfile(tinfo, fileobj=fh) |
1941 | fh.close() |
1942 | |
1943 | @@ -91,7 +119,7 @@ def vcs_dir_contents(path): |
1944 | repo = os.path.dirname(repo) |
1945 | if repo == "/": |
1946 | raise Exception("unsupported or no vcs for %s" % path) |
1947 | - return Utils.cmd_output(ls_files_cmd, cwd=cwd, env=env).split() |
1948 | + return get_string(Utils.cmd_output(ls_files_cmd, cwd=cwd, env=env)).split('\n') |
1949 | |
1950 | |
1951 | def dist(appname='', version=''): |
1952 | @@ -136,12 +164,14 @@ def dist(appname='', version=''): |
1953 | |
1954 | if not isinstance(appname, str) or not appname: |
1955 | # this copes with a mismatch in the calling arguments for dist() |
1956 | - appname = Utils.g_module.APPNAME |
1957 | - version = Utils.g_module.VERSION |
1958 | + appname = Context.g_module.APPNAME |
1959 | + version = Context.g_module.VERSION |
1960 | if not version: |
1961 | - version = Utils.g_module.VERSION |
1962 | + version = Context.g_module.VERSION |
1963 | |
1964 | - srcdir = os.path.normpath(os.path.join(os.path.dirname(Utils.g_module.root_path), Utils.g_module.srcdir)) |
1965 | + srcdir = os.path.normpath( |
1966 | + os.path.join(os.path.dirname(Context.g_module.root_path), |
1967 | + Context.g_module.top)) |
1968 | |
1969 | if not dist_dirs: |
1970 | Logs.error('You must use samba_dist.DIST_DIRS() to set which directories to package') |
1971 | @@ -218,6 +248,9 @@ def dist(appname='', version=''): |
1972 | else: |
1973 | Logs.info('Created %s' % dist_name) |
1974 | |
1975 | + # TODO use the ctx object instead |
1976 | + global dist_archive |
1977 | + dist_archive = dist_name |
1978 | return dist_name |
1979 | |
1980 | |
1981 | diff --git a/buildtools/wafsamba/samba_git.py b/buildtools/wafsamba/samba_git.py |
1982 | index c58a579..09a204f 100644 |
1983 | --- a/buildtools/wafsamba/samba_git.py |
1984 | +++ b/buildtools/wafsamba/samba_git.py |
1985 | @@ -4,7 +4,7 @@ import subprocess |
1986 | def find_git(env=None): |
1987 | """Find the git binary.""" |
1988 | if env is not None and 'GIT' in env: |
1989 | - return env['GIT'] |
1990 | + return env.get_flat('GIT') |
1991 | |
1992 | # Get version from GIT |
1993 | if os.path.exists("/usr/bin/git"): |
1994 | diff --git a/buildtools/wafsamba/samba_headers.py b/buildtools/wafsamba/samba_headers.py |
1995 | index 0a80082..a268c01 100644 |
1996 | --- a/buildtools/wafsamba/samba_headers.py |
1997 | +++ b/buildtools/wafsamba/samba_headers.py |
1998 | @@ -1,7 +1,7 @@ |
1999 | # specialist handling of header files for Samba |
2000 | |
2001 | import os, re, sys, fnmatch |
2002 | -import Build, Logs, Utils |
2003 | +from waflib import Build, Logs, Utils, Errors |
2004 | from samba_utils import TO_LIST, os_path_relpath |
2005 | |
2006 | |
2007 | @@ -99,7 +99,7 @@ def create_public_header(task): |
2008 | os.unlink(tgt) |
2009 | sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % ( |
2010 | os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested)) |
2011 | - raise Utils.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % ( |
2012 | + raise Errors.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % ( |
2013 | hpath, relsrc, task.env.RELPATH)) |
2014 | infile.close() |
2015 | outfile.close() |
2016 | @@ -148,11 +148,12 @@ def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install |
2017 | else: |
2018 | h_name = h |
2019 | inst_name = os.path.basename(h) |
2020 | - relpath1 = os_path_relpath(bld.srcnode.abspath(), bld.curdir) |
2021 | - relpath2 = os_path_relpath(bld.curdir, bld.srcnode.abspath()) |
2022 | + curdir = bld.path.abspath() |
2023 | + relpath1 = os_path_relpath(bld.srcnode.abspath(), curdir) |
2024 | + relpath2 = os_path_relpath(curdir, bld.srcnode.abspath()) |
2025 | targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path)) |
2026 | - if not os.path.exists(os.path.join(bld.curdir, targetdir)): |
2027 | - raise Utils.WafError("missing source directory %s for public header %s" % (targetdir, inst_name)) |
2028 | + if not os.path.exists(os.path.join(curdir, targetdir)): |
2029 | + raise Errors.WafError("missing source directory %s for public header %s" % (targetdir, inst_name)) |
2030 | target = os.path.join(targetdir, inst_name) |
2031 | |
2032 | # the source path of the header, relative to the top of the source tree |
2033 | diff --git a/buildtools/wafsamba/samba_install.py b/buildtools/wafsamba/samba_install.py |
2034 | index 21035bf..47bc0cb 100644 |
2035 | --- a/buildtools/wafsamba/samba_install.py |
2036 | +++ b/buildtools/wafsamba/samba_install.py |
2037 | @@ -4,8 +4,8 @@ |
2038 | # library use |
2039 | |
2040 | import os |
2041 | -import Utils |
2042 | -from TaskGen import feature, before, after |
2043 | +from waflib import Utils, Errors |
2044 | +from waflib.TaskGen import feature, before, after |
2045 | from samba_utils import LIB_PATH, MODE_755, install_rpath, build_rpath |
2046 | |
2047 | @feature('install_bin') |
2048 | @@ -45,7 +45,7 @@ def install_binary(self): |
2049 | |
2050 | # tell waf to install the right binary |
2051 | bld.install_as(os.path.join(install_path, orig_target), |
2052 | - os.path.join(self.path.abspath(bld.env), self.target), |
2053 | + self.path.find_or_declare(self.target), |
2054 | chmod=MODE_755) |
2055 | |
2056 | |
2057 | @@ -143,8 +143,9 @@ def install_library(self): |
2058 | |
2059 | # tell waf to install the library |
2060 | bld.install_as(os.path.join(install_path, install_name), |
2061 | - os.path.join(self.path.abspath(bld.env), inst_name), |
2062 | + self.path.find_or_declare(inst_name), |
2063 | chmod=MODE_755) |
2064 | + |
2065 | if install_link and install_link != install_name: |
2066 | # and the symlink if needed |
2067 | bld.symlink_as(os.path.join(install_path, install_link), os.path.basename(install_name)) |
2068 | @@ -227,7 +228,7 @@ def symlink_bin(self): |
2069 | return |
2070 | |
2071 | if not self.link_task.outputs or not self.link_task.outputs[0]: |
2072 | - raise Utils.WafError('no outputs found for %s in symlink_bin' % self.name) |
2073 | + raise Errors.WafError('no outputs found for %s in symlink_bin' % self.name) |
2074 | binpath = self.link_task.outputs[0].abspath(self.env) |
2075 | bldpath = os.path.join(self.bld.env.BUILD_DIRECTORY, self.link_task.outputs[0].name) |
2076 | |
2077 | diff --git a/buildtools/wafsamba/samba_optimisation.py b/buildtools/wafsamba/samba_optimisation.py |
2078 | deleted file mode 100644 |
2079 | index 5008f83..0000000 |
2080 | --- a/buildtools/wafsamba/samba_optimisation.py |
2081 | +++ /dev/null |
2082 | @@ -1,269 +0,0 @@ |
2083 | -# This file contains waf optimisations for Samba |
2084 | - |
2085 | -# most of these optimisations are possible because of the restricted build environment |
2086 | -# that Samba has. For example, Samba doesn't attempt to cope with Win32 paths during the |
2087 | -# build, and Samba doesn't need build varients |
2088 | - |
2089 | -# overall this makes some build tasks quite a bit faster |
2090 | - |
2091 | -import os |
2092 | -import Build, Utils, Node |
2093 | -from TaskGen import feature, after, before |
2094 | -import preproc |
2095 | - |
2096 | -@feature('c', 'cc', 'cxx') |
2097 | -@after('apply_type_vars', 'apply_lib_vars', 'apply_core') |
2098 | -def apply_incpaths(self): |
2099 | - lst = [] |
2100 | - |
2101 | - try: |
2102 | - kak = self.bld.kak |
2103 | - except AttributeError: |
2104 | - kak = self.bld.kak = {} |
2105 | - |
2106 | - # TODO move the uselib processing out of here |
2107 | - for lib in self.to_list(self.uselib): |
2108 | - for path in self.env['CPPPATH_' + lib]: |
2109 | - if not path in lst: |
2110 | - lst.append(path) |
2111 | - if preproc.go_absolute: |
2112 | - for path in preproc.standard_includes: |
2113 | - if not path in lst: |
2114 | - lst.append(path) |
2115 | - |
2116 | - for path in self.to_list(self.includes): |
2117 | - if not path in lst: |
2118 | - if preproc.go_absolute or path[0] != '/': # os.path.isabs(path): |
2119 | - lst.append(path) |
2120 | - else: |
2121 | - self.env.prepend_value('CPPPATH', path) |
2122 | - |
2123 | - for path in lst: |
2124 | - node = None |
2125 | - if path[0] == '/': # os.path.isabs(path): |
2126 | - if preproc.go_absolute: |
2127 | - node = self.bld.root.find_dir(path) |
2128 | - elif path[0] == '#': |
2129 | - node = self.bld.srcnode |
2130 | - if len(path) > 1: |
2131 | - try: |
2132 | - node = kak[path] |
2133 | - except KeyError: |
2134 | - kak[path] = node = node.find_dir(path[1:]) |
2135 | - else: |
2136 | - try: |
2137 | - node = kak[(self.path.id, path)] |
2138 | - except KeyError: |
2139 | - kak[(self.path.id, path)] = node = self.path.find_dir(path) |
2140 | - |
2141 | - if node: |
2142 | - self.env.append_value('INC_PATHS', node) |
2143 | - |
2144 | -@feature('c', 'cc') |
2145 | -@after('apply_incpaths') |
2146 | -def apply_obj_vars_cc(self): |
2147 | - """after apply_incpaths for INC_PATHS""" |
2148 | - env = self.env |
2149 | - app = env.append_unique |
2150 | - cpppath_st = env['CPPPATH_ST'] |
2151 | - |
2152 | - lss = env['_CCINCFLAGS'] |
2153 | - |
2154 | - try: |
2155 | - cac = self.bld.cac |
2156 | - except AttributeError: |
2157 | - cac = self.bld.cac = {} |
2158 | - |
2159 | - # local flags come first |
2160 | - # set the user-defined includes paths |
2161 | - for i in env['INC_PATHS']: |
2162 | - |
2163 | - try: |
2164 | - lss.extend(cac[i.id]) |
2165 | - except KeyError: |
2166 | - |
2167 | - cac[i.id] = [cpppath_st % i.bldpath(env), cpppath_st % i.srcpath(env)] |
2168 | - lss.extend(cac[i.id]) |
2169 | - |
2170 | - env['_CCINCFLAGS'] = lss |
2171 | - # set the library include paths |
2172 | - for i in env['CPPPATH']: |
2173 | - app('_CCINCFLAGS', cpppath_st % i) |
2174 | - |
2175 | -import Node, Environment |
2176 | - |
2177 | -def vari(self): |
2178 | - return "default" |
2179 | -Environment.Environment.variant = vari |
2180 | - |
2181 | -def variant(self, env): |
2182 | - if not env: return 0 |
2183 | - elif self.id & 3 == Node.FILE: return 0 |
2184 | - else: return "default" |
2185 | -Node.Node.variant = variant |
2186 | - |
2187 | - |
2188 | -import TaskGen, Task |
2189 | - |
2190 | -def create_task(self, name, src=None, tgt=None): |
2191 | - task = Task.TaskBase.classes[name](self.env, generator=self) |
2192 | - if src: |
2193 | - task.set_inputs(src) |
2194 | - if tgt: |
2195 | - task.set_outputs(tgt) |
2196 | - return task |
2197 | -TaskGen.task_gen.create_task = create_task |
2198 | - |
2199 | -def hash_constraints(self): |
2200 | - a = self.attr |
2201 | - sum = hash((str(a('before', '')), |
2202 | - str(a('after', '')), |
2203 | - str(a('ext_in', '')), |
2204 | - str(a('ext_out', '')), |
2205 | - self.__class__.maxjobs)) |
2206 | - return sum |
2207 | -Task.TaskBase.hash_constraints = hash_constraints |
2208 | - |
2209 | -def hash_env_vars(self, env, vars_lst): |
2210 | - idx = str(id(env)) + str(vars_lst) |
2211 | - try: |
2212 | - return self.cache_sig_vars[idx] |
2213 | - except KeyError: |
2214 | - pass |
2215 | - |
2216 | - m = Utils.md5() |
2217 | - m.update(''.join([str(env[a]) for a in vars_lst])) |
2218 | - |
2219 | - ret = self.cache_sig_vars[idx] = m.digest() |
2220 | - return ret |
2221 | -Build.BuildContext.hash_env_vars = hash_env_vars |
2222 | - |
2223 | - |
2224 | -def store_fast(self, filename): |
2225 | - file = open(filename, 'wb') |
2226 | - data = self.get_merged_dict() |
2227 | - try: |
2228 | - Build.cPickle.dump(data, file, -1) |
2229 | - finally: |
2230 | - file.close() |
2231 | -Environment.Environment.store_fast = store_fast |
2232 | - |
2233 | -def load_fast(self, filename): |
2234 | - file = open(filename, 'rb') |
2235 | - try: |
2236 | - data = Build.cPickle.load(file) |
2237 | - finally: |
2238 | - file.close() |
2239 | - self.table.update(data) |
2240 | -Environment.Environment.load_fast = load_fast |
2241 | - |
2242 | -def is_this_a_static_lib(self, name): |
2243 | - try: |
2244 | - cache = self.cache_is_this_a_static_lib |
2245 | - except AttributeError: |
2246 | - cache = self.cache_is_this_a_static_lib = {} |
2247 | - try: |
2248 | - return cache[name] |
2249 | - except KeyError: |
2250 | - ret = cache[name] = 'cstaticlib' in self.bld.get_tgen_by_name(name).features |
2251 | - return ret |
2252 | -TaskGen.task_gen.is_this_a_static_lib = is_this_a_static_lib |
2253 | - |
2254 | -def shared_ancestors(self): |
2255 | - try: |
2256 | - cache = self.cache_is_this_a_static_lib |
2257 | - except AttributeError: |
2258 | - cache = self.cache_is_this_a_static_lib = {} |
2259 | - try: |
2260 | - return cache[id(self)] |
2261 | - except KeyError: |
2262 | - |
2263 | - ret = [] |
2264 | - if 'cshlib' in self.features: # or 'cprogram' in self.features: |
2265 | - if getattr(self, 'uselib_local', None): |
2266 | - lst = self.to_list(self.uselib_local) |
2267 | - ret = [x for x in lst if not self.is_this_a_static_lib(x)] |
2268 | - cache[id(self)] = ret |
2269 | - return ret |
2270 | -TaskGen.task_gen.shared_ancestors = shared_ancestors |
2271 | - |
2272 | -@feature('c', 'cc', 'cxx') |
2273 | -@after('apply_link', 'init_cc', 'init_cxx', 'apply_core') |
2274 | -def apply_lib_vars(self): |
2275 | - """after apply_link because of 'link_task' |
2276 | - after default_cc because of the attribute 'uselib'""" |
2277 | - |
2278 | - # after 'apply_core' in case if 'cc' if there is no link |
2279 | - |
2280 | - env = self.env |
2281 | - app = env.append_value |
2282 | - seen_libpaths = set([]) |
2283 | - |
2284 | - # OPTIMIZATION 1: skip uselib variables already added (700ms) |
2285 | - seen_uselib = set([]) |
2286 | - |
2287 | - # 1. the case of the libs defined in the project (visit ancestors first) |
2288 | - # the ancestors external libraries (uselib) will be prepended |
2289 | - self.uselib = self.to_list(self.uselib) |
2290 | - names = self.to_list(self.uselib_local) |
2291 | - |
2292 | - seen = set([]) |
2293 | - tmp = Utils.deque(names) # consume a copy of the list of names |
2294 | - while tmp: |
2295 | - lib_name = tmp.popleft() |
2296 | - # visit dependencies only once |
2297 | - if lib_name in seen: |
2298 | - continue |
2299 | - |
2300 | - y = self.get_tgen_by_name(lib_name) |
2301 | - if not y: |
2302 | - raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name)) |
2303 | - y.post() |
2304 | - seen.add(lib_name) |
2305 | - |
2306 | - # OPTIMIZATION 2: pre-compute ancestors shared libraries (100ms) |
2307 | - tmp.extend(y.shared_ancestors()) |
2308 | - |
2309 | - # link task and flags |
2310 | - if getattr(y, 'link_task', None): |
2311 | - |
2312 | - link_name = y.target[y.target.rfind('/') + 1:] |
2313 | - if 'cstaticlib' in y.features: |
2314 | - app('STATICLIB', link_name) |
2315 | - elif 'cshlib' in y.features or 'cprogram' in y.features: |
2316 | - # WARNING some linkers can link against programs |
2317 | - app('LIB', link_name) |
2318 | - |
2319 | - # the order |
2320 | - self.link_task.set_run_after(y.link_task) |
2321 | - |
2322 | - # for the recompilation |
2323 | - dep_nodes = getattr(self.link_task, 'dep_nodes', []) |
2324 | - self.link_task.dep_nodes = dep_nodes + y.link_task.outputs |
2325 | - |
2326 | - # OPTIMIZATION 3: reduce the amount of function calls |
2327 | - # add the link path too |
2328 | - par = y.link_task.outputs[0].parent |
2329 | - if id(par) not in seen_libpaths: |
2330 | - seen_libpaths.add(id(par)) |
2331 | - tmp_path = par.bldpath(self.env) |
2332 | - if not tmp_path in env['LIBPATH']: |
2333 | - env.prepend_value('LIBPATH', tmp_path) |
2334 | - |
2335 | - |
2336 | - # add ancestors uselib too - but only propagate those that have no staticlib |
2337 | - for v in self.to_list(y.uselib): |
2338 | - if v not in seen_uselib: |
2339 | - seen_uselib.add(v) |
2340 | - if not env['STATICLIB_' + v]: |
2341 | - if not v in self.uselib: |
2342 | - self.uselib.insert(0, v) |
2343 | - |
2344 | - # 2. the case of the libs defined outside |
2345 | - for x in self.uselib: |
2346 | - for v in self.p_flag_vars: |
2347 | - val = self.env[v + '_' + x] |
2348 | - if val: |
2349 | - self.env.append_value(v, val) |
2350 | - |
2351 | - |
2352 | diff --git a/buildtools/wafsamba/samba_patterns.py b/buildtools/wafsamba/samba_patterns.py |
2353 | index 2b93937..d0fe965 100644 |
2354 | --- a/buildtools/wafsamba/samba_patterns.py |
2355 | +++ b/buildtools/wafsamba/samba_patterns.py |
2356 | @@ -1,6 +1,7 @@ |
2357 | # a waf tool to add extension based build patterns for Samba |
2358 | |
2359 | -import Build |
2360 | +import sys |
2361 | +from waflib import Build |
2362 | from wafsamba import samba_version_file |
2363 | |
2364 | def write_version_header(task): |
2365 | @@ -146,13 +147,19 @@ def write_build_options_section(fp, keys, section): |
2366 | fp.write("\n") |
2367 | |
2368 | def write_build_options(task): |
2369 | - tbl = task.env['defines'] |
2370 | + tbl = task.env |
2371 | keys_option_with = [] |
2372 | keys_option_utmp = [] |
2373 | keys_option_have = [] |
2374 | keys_header_sys = [] |
2375 | keys_header_other = [] |
2376 | keys_misc = [] |
2377 | + if sys.hexversion>0x300000f: |
2378 | + trans_table = bytes.maketrans(b'.-()', b'____') |
2379 | + else: |
2380 | + import string |
2381 | + trans_table = string.maketrans('.-()', '____') |
2382 | + |
2383 | for key in tbl: |
2384 | if key.startswith("HAVE_UT_UT_") or key.find("UTMP") >= 0: |
2385 | keys_option_utmp.append(key) |
2386 | @@ -169,7 +176,7 @@ def write_build_options(task): |
2387 | l = key.split("(") |
2388 | keys_misc.append(l[0]) |
2389 | else: |
2390 | - keys_misc.append(key) |
2391 | + keys_misc.append(key.translate(trans_table)) |
2392 | |
2393 | tgt = task.outputs[0].bldpath(task.env) |
2394 | f = open(tgt, 'w') |
2395 | diff --git a/buildtools/wafsamba/samba_perl.py b/buildtools/wafsamba/samba_perl.py |
2396 | index 2e9a53a..e019acb 100644 |
2397 | --- a/buildtools/wafsamba/samba_perl.py |
2398 | +++ b/buildtools/wafsamba/samba_perl.py |
2399 | @@ -1,6 +1,6 @@ |
2400 | -import Utils |
2401 | -from Configure import conf |
2402 | - |
2403 | +from waflib import Utils |
2404 | +from waflib.Configure import conf |
2405 | +from samba_utils import get_string |
2406 | done = {} |
2407 | |
2408 | @conf |
2409 | @@ -9,13 +9,16 @@ def SAMBA_CHECK_PERL(conf, mandatory=True, version=(5,0,0)): |
2410 | return |
2411 | done["done"] = True |
2412 | conf.find_program('perl', var='PERL', mandatory=mandatory) |
2413 | - conf.check_tool('perl') |
2414 | + conf.load('perl') |
2415 | path_perl = conf.find_program('perl') |
2416 | conf.env.PERL_SPECIFIED = (conf.env.PERL != path_perl) |
2417 | conf.check_perl_version(version) |
2418 | |
2419 | def read_perl_config_var(cmd): |
2420 | - return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd])) |
2421 | + output = Utils.cmd_output([conf.env.get_flat('PERL'), '-MConfig', '-e', cmd]) |
2422 | + if not isinstance(output, str): |
2423 | + output = get_string(output) |
2424 | + return Utils.to_list(output) |
2425 | |
2426 | def check_perl_config_var(var): |
2427 | conf.start_msg("Checking for perl $Config{%s}:" % var) |
2428 | diff --git a/buildtools/wafsamba/samba_pidl.py b/buildtools/wafsamba/samba_pidl.py |
2429 | index 9651e4d..3fecfa9 100644 |
2430 | --- a/buildtools/wafsamba/samba_pidl.py |
2431 | +++ b/buildtools/wafsamba/samba_pidl.py |
2432 | @@ -1,8 +1,8 @@ |
2433 | # waf build tool for building IDL files with pidl |
2434 | |
2435 | import os |
2436 | -import Build |
2437 | -from TaskGen import feature, before |
2438 | +from waflib import Build, Utils |
2439 | +from waflib.TaskGen import feature, before |
2440 | from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE |
2441 | |
2442 | def SAMBA_PIDL(bld, pname, source, |
2443 | @@ -76,9 +76,9 @@ def SAMBA_PIDL(bld, pname, source, |
2444 | else: |
2445 | cc = 'CC="%s"' % bld.CONFIG_GET("CC") |
2446 | |
2447 | - t = bld(rule='cd .. && %s %s ${PERL} "${PIDL}" --quiet ${OPTIONS} --outputdir ${OUTPUTDIR} -- "${SRC[0].abspath(env)}"' % (cpp, cc), |
2448 | + t = bld(rule='cd ${PIDL_LAUNCH_DIR} && %s %s ${PERL} ${PIDL} --quiet ${OPTIONS} --outputdir ${OUTPUTDIR} -- "${IDLSRC}"' % (cpp, cc), |
2449 | ext_out = '.c', |
2450 | - before = 'cc', |
2451 | + before = 'c', |
2452 | update_outputs = True, |
2453 | shell = True, |
2454 | source = source, |
2455 | @@ -86,18 +86,22 @@ def SAMBA_PIDL(bld, pname, source, |
2456 | name = name, |
2457 | samba_type = 'PIDL') |
2458 | |
2459 | - # prime the list of nodes we are dependent on with the cached pidl sources |
2460 | - t.allnodes = pidl_src_nodes |
2461 | |
2462 | - t.env.PIDL = os.path.join(bld.srcnode.abspath(), 'pidl/pidl') |
2463 | + t.env.PIDL_LAUNCH_DIR = bld.srcnode.path_from(bld.bldnode) |
2464 | + pnode = bld.srcnode.find_resource('pidl/pidl') |
2465 | + t.env.PIDL = pnode.path_from(bld.srcnode) |
2466 | t.env.OPTIONS = TO_LIST(options) |
2467 | - t.env.OUTPUTDIR = bld.bldnode.name + '/' + bld.path.find_dir(output_dir).bldpath(t.env) |
2468 | + snode = t.path.find_resource(source[0]) |
2469 | + t.env.IDLSRC = snode.path_from(bld.srcnode) |
2470 | + t.env.OUTPUTDIR = bld.bldnode.path_from(bld.srcnode) + '/' + bld.path.find_dir(output_dir).path_from(bld.srcnode) |
2471 | + |
2472 | + bld.add_manual_dependency(snode, pidl_src_nodes) |
2473 | |
2474 | if generate_tables and table_header_idx is not None: |
2475 | pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS') |
2476 | pidl_headers[name] = [bld.path.find_or_declare(out_files[table_header_idx])] |
2477 | |
2478 | - t.more_includes = '#' + bld.path.relpath_gen(bld.srcnode) |
2479 | + t.more_includes = '#' + bld.path.path_from(bld.srcnode) |
2480 | Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL |
2481 | |
2482 | |
2483 | @@ -117,13 +121,15 @@ Build.BuildContext.SAMBA_PIDL_LIST = SAMBA_PIDL_LIST |
2484 | @before('exec_rule') |
2485 | def collect(self): |
2486 | pidl_headers = LOCAL_CACHE(self.bld, 'PIDL_HEADERS') |
2487 | + # The first source is tables.pl itself |
2488 | + self.source = Utils.to_list(self.source) |
2489 | for (name, hd) in pidl_headers.items(): |
2490 | y = self.bld.get_tgen_by_name(name) |
2491 | self.bld.ASSERT(y is not None, 'Failed to find PIDL header %s' % name) |
2492 | y.post() |
2493 | for node in hd: |
2494 | self.bld.ASSERT(node is not None, 'Got None as build node generating PIDL table for %s' % name) |
2495 | - self.source += " " + node.relpath_gen(self.path) |
2496 | + self.source.append(node) |
2497 | |
2498 | |
2499 | def SAMBA_PIDL_TABLES(bld, name, target): |
2500 | @@ -131,9 +137,9 @@ def SAMBA_PIDL_TABLES(bld, name, target): |
2501 | bld.SET_BUILD_GROUP('main') |
2502 | t = bld( |
2503 | features = 'collect', |
2504 | - rule = '${PERL} ${SRC} --output ${TGT} | sed "s|default/||" > ${TGT}', |
2505 | + rule = '${PERL} ${SRC} > ${TGT}', |
2506 | ext_out = '.c', |
2507 | - before = 'cc', |
2508 | + before = 'c', |
2509 | update_outputs = True, |
2510 | shell = True, |
2511 | source = '../../librpc/tables.pl', |
2512 | diff --git a/buildtools/wafsamba/samba_python.py b/buildtools/wafsamba/samba_python.py |
2513 | index cb99fe9..fac0e34 100644 |
2514 | --- a/buildtools/wafsamba/samba_python.py |
2515 | +++ b/buildtools/wafsamba/samba_python.py |
2516 | @@ -1,11 +1,11 @@ |
2517 | # waf build tool for building IDL files with pidl |
2518 | |
2519 | -import os |
2520 | -import Build, Logs, Utils, Configure |
2521 | -from Configure import conf |
2522 | +import os, sys |
2523 | +from waflib import Build, Logs, Utils, Configure, Errors |
2524 | +from waflib.Configure import conf |
2525 | |
2526 | @conf |
2527 | -def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)): |
2528 | +def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,6,0)): |
2529 | # enable tool to build python extensions |
2530 | if conf.env.HAVE_PYTHON_H: |
2531 | conf.check_python_version(version) |
2532 | @@ -14,23 +14,25 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)): |
2533 | interpreters = [] |
2534 | |
2535 | if conf.env['EXTRA_PYTHON']: |
2536 | - conf.all_envs['extrapython'] = conf.env.copy() |
2537 | + conf.all_envs['extrapython'] = conf.env.derive() |
2538 | conf.setenv('extrapython') |
2539 | conf.env['PYTHON'] = conf.env['EXTRA_PYTHON'] |
2540 | conf.env['IS_EXTRA_PYTHON'] = 'yes' |
2541 | conf.find_program('python', var='PYTHON', mandatory=True) |
2542 | - conf.check_tool('python') |
2543 | + conf.load('python') |
2544 | try: |
2545 | - conf.check_python_version((3, 3, 0)) |
2546 | + conf.check_python_version(version) |
2547 | except Exception: |
2548 | - Logs.warn('extra-python needs to be Python 3.3 or later') |
2549 | + Logs.warn('extra-python needs to be Python %s.%s.%s or later' % |
2550 | + (version[0], version[1], version[2])) |
2551 | raise |
2552 | interpreters.append(conf.env['PYTHON']) |
2553 | conf.setenv('default') |
2554 | |
2555 | - conf.find_program('python', var='PYTHON', mandatory=mandatory) |
2556 | - conf.check_tool('python') |
2557 | - path_python = conf.find_program('python') |
2558 | + conf.find_program('python3', var='PYTHON', mandatory=mandatory) |
2559 | + conf.load('python') |
2560 | + path_python = conf.find_program('python3') |
2561 | + |
2562 | conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python) |
2563 | conf.check_python_version(version) |
2564 | |
2565 | @@ -42,14 +44,16 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)): |
2566 | def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True): |
2567 | if conf.env.disable_python: |
2568 | if mandatory: |
2569 | - raise Utils.WafError("Cannot check for python headers when " |
2570 | + raise Errors.WafError("Cannot check for python headers when " |
2571 | "--disable-python specified") |
2572 | |
2573 | conf.msg("python headers", "Check disabled due to --disable-python") |
2574 | # we don't want PYTHONDIR in config.h, as otherwise changing |
2575 | # --prefix causes a complete rebuild |
2576 | - del(conf.env.defines['PYTHONDIR']) |
2577 | - del(conf.env.defines['PYTHONARCHDIR']) |
2578 | + conf.env.DEFINES = [x for x in conf.env.DEFINES |
2579 | + if not x.startswith('PYTHONDIR=') |
2580 | + and not x.startswith('PYTHONARCHDIR=')] |
2581 | + |
2582 | return |
2583 | |
2584 | if conf.env["python_headers_checked"] == []: |
2585 | @@ -64,21 +68,22 @@ def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True): |
2586 | if conf.env['EXTRA_PYTHON']: |
2587 | extraversion = conf.all_envs['extrapython']['PYTHON_VERSION'] |
2588 | if extraversion == conf.env['PYTHON_VERSION']: |
2589 | - raise Utils.WafError("extrapython %s is same as main python %s" % ( |
2590 | + raise Errors.WafError("extrapython %s is same as main python %s" % ( |
2591 | extraversion, conf.env['PYTHON_VERSION'])) |
2592 | else: |
2593 | conf.msg("python headers", "using cache") |
2594 | |
2595 | # we don't want PYTHONDIR in config.h, as otherwise changing |
2596 | # --prefix causes a complete rebuild |
2597 | - del(conf.env.defines['PYTHONDIR']) |
2598 | - del(conf.env.defines['PYTHONARCHDIR']) |
2599 | + conf.env.DEFINES = [x for x in conf.env.DEFINES |
2600 | + if not x.startswith('PYTHONDIR=') |
2601 | + and not x.startswith('PYTHONARCHDIR=')] |
2602 | |
2603 | def _check_python_headers(conf, mandatory): |
2604 | try: |
2605 | - Configure.ConfigurationError |
2606 | - conf.check_python_headers(mandatory=mandatory) |
2607 | - except Configure.ConfigurationError: |
2608 | + conf.errors.ConfigurationError |
2609 | + conf.check_python_headers() |
2610 | + except conf.errors.ConfigurationError: |
2611 | if mandatory: |
2612 | raise |
2613 | |
2614 | @@ -95,6 +100,11 @@ def _check_python_headers(conf, mandatory): |
2615 | conf.env.append_unique('LIBPATH_PYEMBED', lib[2:]) # strip '-L' |
2616 | conf.env['LINKFLAGS_PYEMBED'].remove(lib) |
2617 | |
2618 | + # same as in waf 1.5, keep only '-fno-strict-aliasing' |
2619 | + # and ignore defines such as NDEBUG _FORTIFY_SOURCE=2 |
2620 | + conf.env.DEFINES_PYEXT = [] |
2621 | + conf.env.CFLAGS_PYEXT = ['-fno-strict-aliasing'] |
2622 | + |
2623 | return |
2624 | |
2625 | def PYTHON_BUILD_IS_ENABLED(self): |
2626 | @@ -145,7 +155,7 @@ def SAMBA_PYTHON(bld, name, |
2627 | source = bld.EXPAND_VARIABLES(source, vars=vars) |
2628 | |
2629 | if realname is not None: |
2630 | - link_name = 'python_modules/%s' % realname |
2631 | + link_name = 'python/%s' % realname |
2632 | else: |
2633 | link_name = None |
2634 | |
2635 | diff --git a/buildtools/wafsamba/samba_third_party.py b/buildtools/wafsamba/samba_third_party.py |
2636 | index 1144f81..e0dd3e1 100644 |
2637 | --- a/buildtools/wafsamba/samba_third_party.py |
2638 | +++ b/buildtools/wafsamba/samba_third_party.py |
2639 | @@ -1,12 +1,12 @@ |
2640 | # functions to support third party libraries |
2641 | |
2642 | import os |
2643 | -import Utils, Build |
2644 | -from Configure import conf |
2645 | +from waflib import Utils, Build, Context |
2646 | +from waflib.Configure import conf |
2647 | |
2648 | @conf |
2649 | def CHECK_FOR_THIRD_PARTY(conf): |
2650 | - return os.path.exists(os.path.join(Utils.g_module.srcdir, 'third_party')) |
2651 | + return os.path.exists(os.path.join(Context.g_module.top, 'third_party')) |
2652 | |
2653 | Build.BuildContext.CHECK_FOR_THIRD_PARTY = CHECK_FOR_THIRD_PARTY |
2654 | |
2655 | @@ -36,18 +36,18 @@ Build.BuildContext.CHECK_POPT = CHECK_POPT |
2656 | |
2657 | @conf |
2658 | def CHECK_CMOCKA(conf): |
2659 | - return conf.CHECK_BUNDLED_SYSTEM_PKG('cmocka', minversion='1.1.1') |
2660 | + return conf.CHECK_BUNDLED_SYSTEM_PKG('cmocka', minversion='1.1.3') |
2661 | |
2662 | Build.BuildContext.CHECK_CMOCKA = CHECK_CMOCKA |
2663 | |
2664 | @conf |
2665 | def CHECK_SOCKET_WRAPPER(conf): |
2666 | - return conf.CHECK_BUNDLED_SYSTEM_PKG('socket_wrapper', minversion='1.1.9') |
2667 | + return conf.CHECK_BUNDLED_SYSTEM_PKG('socket_wrapper', minversion='1.2.1') |
2668 | Build.BuildContext.CHECK_SOCKET_WRAPPER = CHECK_SOCKET_WRAPPER |
2669 | |
2670 | @conf |
2671 | def CHECK_NSS_WRAPPER(conf): |
2672 | - return conf.CHECK_BUNDLED_SYSTEM_PKG('nss_wrapper', minversion='1.1.3') |
2673 | + return conf.CHECK_BUNDLED_SYSTEM_PKG('nss_wrapper', minversion='1.1.5') |
2674 | Build.BuildContext.CHECK_NSS_WRAPPER = CHECK_NSS_WRAPPER |
2675 | |
2676 | @conf |
2677 | @@ -62,5 +62,5 @@ Build.BuildContext.CHECK_UID_WRAPPER = CHECK_UID_WRAPPER |
2678 | |
2679 | @conf |
2680 | def CHECK_PAM_WRAPPER(conf): |
2681 | - return conf.CHECK_BUNDLED_SYSTEM_PKG('pam_wrapper', minversion='1.0.4') |
2682 | + return conf.CHECK_BUNDLED_SYSTEM_PKG('pam_wrapper', minversion='1.0.7') |
2683 | Build.BuildContext.CHECK_PAM_WRAPPER = CHECK_PAM_WRAPPER |
2684 | diff --git a/buildtools/wafsamba/samba_utils.py b/buildtools/wafsamba/samba_utils.py |
2685 | index 0f95c12..ad97de1 100644 |
2686 | --- a/buildtools/wafsamba/samba_utils.py |
2687 | +++ b/buildtools/wafsamba/samba_utils.py |
2688 | @@ -1,30 +1,92 @@ |
2689 | # a waf tool to add autoconf-like macros to the configure section |
2690 | # and for SAMBA_ macros for building libraries, binaries etc |
2691 | |
2692 | -import os, sys, re, fnmatch, shlex |
2693 | +import errno |
2694 | +import os, sys, re, fnmatch, shlex, inspect |
2695 | from optparse import SUPPRESS_HELP |
2696 | -import Build, Options, Utils, Task, Logs, Configure |
2697 | -from TaskGen import feature, before, after |
2698 | -from Configure import conf, ConfigurationContext |
2699 | -from Logs import debug |
2700 | +from waflib import Build, Options, Utils, Task, Logs, Configure, Errors, Context |
2701 | +from waflib import Scripting |
2702 | +from waflib.TaskGen import feature, before, after |
2703 | +from waflib.Configure import ConfigurationContext |
2704 | +from waflib.Logs import debug |
2705 | +from waflib import ConfigSet |
2706 | +from waflib.Build import CACHE_SUFFIX |
2707 | |
2708 | # TODO: make this a --option |
2709 | LIB_PATH="shared" |
2710 | |
2711 | |
2712 | +PY3 = sys.version_info[0] == 3 |
2713 | + |
2714 | +if PY3: |
2715 | + |
2716 | + # helper function to get a string from a variable that maybe 'str' or |
2717 | + # 'bytes' if 'bytes' then it is decoded using 'utf8'. If 'str' is passed |
2718 | + # it is returned unchanged |
2719 | + # Using this function is PY2/PY3 code should ensure in most cases |
2720 | + # the PY2 code runs unchanged in PY2 whereas the code in PY3 possibly |
2721 | + # decodes the variable (see PY2 implementation of this function below) |
2722 | + def get_string(bytesorstring): |
2723 | + tmp = bytesorstring |
2724 | + if isinstance(bytesorstring, bytes): |
2725 | + tmp = bytesorstring.decode('utf8') |
2726 | + elif not isinstance(bytesorstring, str): |
2727 | + raise ValueError('Expected byte of string for %s:%s' % (type(bytesorstring), bytesorstring)) |
2728 | + return tmp |
2729 | + |
2730 | +else: |
2731 | + |
2732 | + # Helper function to return string. |
2733 | + # if 'str' or 'unicode' passed in they are returned unchanged |
2734 | + # otherwise an exception is generated |
2735 | + # Using this function is PY2/PY3 code should ensure in most cases |
2736 | + # the PY2 code runs unchanged in PY2 whereas the code in PY3 possibly |
2737 | + # decodes the variable (see PY3 implementation of this function above) |
2738 | + def get_string(bytesorstring): |
2739 | + tmp = bytesorstring |
2740 | + if not(isinstance(bytesorstring, str) or isinstance(bytesorstring, unicode)): |
2741 | + raise ValueError('Expected str or unicode for %s:%s' % (type(bytesorstring), bytesorstring)) |
2742 | + return tmp |
2743 | + |
2744 | # sigh, python octal constants are a mess |
2745 | MODE_644 = int('644', 8) |
2746 | +MODE_744 = int('744', 8) |
2747 | MODE_755 = int('755', 8) |
2748 | +MODE_777 = int('777', 8) |
2749 | + |
2750 | +def conf(f): |
2751 | + # override in order to propagate the argument "mandatory" |
2752 | + def fun(*k, **kw): |
2753 | + mandatory = True |
2754 | + if 'mandatory' in kw: |
2755 | + mandatory = kw['mandatory'] |
2756 | + del kw['mandatory'] |
2757 | + |
2758 | + try: |
2759 | + return f(*k, **kw) |
2760 | + except Errors.ConfigurationError: |
2761 | + if mandatory: |
2762 | + raise |
2763 | + |
2764 | + fun.__name__ = f.__name__ |
2765 | + if 'mandatory' in inspect.getsource(f): |
2766 | + fun = f |
2767 | + |
2768 | + setattr(Configure.ConfigurationContext, f.__name__, fun) |
2769 | + setattr(Build.BuildContext, f.__name__, fun) |
2770 | + return f |
2771 | +Configure.conf = conf |
2772 | +Configure.conftest = conf |
2773 | |
2774 | @conf |
2775 | def SET_TARGET_TYPE(ctx, target, value): |
2776 | '''set the target type of a target''' |
2777 | cache = LOCAL_CACHE(ctx, 'TARGET_TYPE') |
2778 | if target in cache and cache[target] != 'EMPTY': |
2779 | - Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, cache[target])) |
2780 | + Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.path.abspath(), value, cache[target])) |
2781 | sys.exit(1) |
2782 | LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value) |
2783 | - debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir)) |
2784 | + debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.path.abspath())) |
2785 | return True |
2786 | |
2787 | |
2788 | @@ -101,7 +163,7 @@ def LOCAL_CACHE_SET(ctx, cachename, key, value): |
2789 | def ASSERT(ctx, expression, msg): |
2790 | '''a build assert call''' |
2791 | if not expression: |
2792 | - raise Utils.WafError("ERROR: %s\n" % msg) |
2793 | + raise Errors.WafError("ERROR: %s\n" % msg) |
2794 | Build.BuildContext.ASSERT = ASSERT |
2795 | |
2796 | |
2797 | @@ -122,9 +184,9 @@ def dict_concat(d1, d2): |
2798 | |
2799 | def ADD_COMMAND(opt, name, function): |
2800 | '''add a new top level command to waf''' |
2801 | - Utils.g_module.__dict__[name] = function |
2802 | + Context.g_module.__dict__[name] = function |
2803 | opt.name = function |
2804 | -Options.Handler.ADD_COMMAND = ADD_COMMAND |
2805 | +Options.OptionsContext.ADD_COMMAND = ADD_COMMAND |
2806 | |
2807 | |
2808 | @feature('c', 'cc', 'cshlib', 'cprogram') |
2809 | @@ -199,8 +261,10 @@ def subst_vars_error(string, env): |
2810 | if re.match('\$\{\w+\}', v): |
2811 | vname = v[2:-1] |
2812 | if not vname in env: |
2813 | - raise KeyError("Failed to find variable %s in %s" % (vname, string)) |
2814 | + raise KeyError("Failed to find variable %s in %s in env %s <%s>" % (vname, string, env.__class__, str(env))) |
2815 | v = env[vname] |
2816 | + if isinstance(v, list): |
2817 | + v = ' '.join(v) |
2818 | out.append(v) |
2819 | return ''.join(out) |
2820 | |
2821 | @@ -212,51 +276,6 @@ def SUBST_ENV_VAR(ctx, varname): |
2822 | Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR |
2823 | |
2824 | |
2825 | -def ENFORCE_GROUP_ORDERING(bld): |
2826 | - '''enforce group ordering for the project. This |
2827 | - makes the group ordering apply only when you specify |
2828 | - a target with --target''' |
2829 | - if Options.options.compile_targets: |
2830 | - @feature('*') |
2831 | - @before('exec_rule', 'apply_core', 'collect') |
2832 | - def force_previous_groups(self): |
2833 | - if getattr(self.bld, 'enforced_group_ordering', False): |
2834 | - return |
2835 | - self.bld.enforced_group_ordering = True |
2836 | - |
2837 | - def group_name(g): |
2838 | - tm = self.bld.task_manager |
2839 | - return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0] |
2840 | - |
2841 | - my_id = id(self) |
2842 | - bld = self.bld |
2843 | - stop = None |
2844 | - for g in bld.task_manager.groups: |
2845 | - for t in g.tasks_gen: |
2846 | - if id(t) == my_id: |
2847 | - stop = id(g) |
2848 | - debug('group: Forcing up to group %s for target %s', |
2849 | - group_name(g), self.name or self.target) |
2850 | - break |
2851 | - if stop is not None: |
2852 | - break |
2853 | - if stop is None: |
2854 | - return |
2855 | - |
2856 | - for i in xrange(len(bld.task_manager.groups)): |
2857 | - g = bld.task_manager.groups[i] |
2858 | - bld.task_manager.current_group = i |
2859 | - if id(g) == stop: |
2860 | - break |
2861 | - debug('group: Forcing group %s', group_name(g)) |
2862 | - for t in g.tasks_gen: |
2863 | - if not getattr(t, 'forced_groups', False): |
2864 | - debug('group: Posting %s', t.name or t.target) |
2865 | - t.forced_groups = True |
2866 | - t.post() |
2867 | -Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING |
2868 | - |
2869 | - |
2870 | def recursive_dirlist(dir, relbase, pattern=None): |
2871 | '''recursive directory list''' |
2872 | ret = [] |
2873 | @@ -271,6 +290,18 @@ def recursive_dirlist(dir, relbase, pattern=None): |
2874 | return ret |
2875 | |
2876 | |
2877 | +def symlink(src, dst, force=True): |
2878 | + """Can create symlink by force""" |
2879 | + try: |
2880 | + os.symlink(src, dst) |
2881 | + except OSError as exc: |
2882 | + if exc.errno == errno.EEXIST and force: |
2883 | + os.remove(dst) |
2884 | + os.symlink(src, dst) |
2885 | + else: |
2886 | + raise |
2887 | + |
2888 | + |
2889 | def mkdir_p(dir): |
2890 | '''like mkdir -p''' |
2891 | if not dir: |
2892 | @@ -312,8 +343,7 @@ def EXPAND_VARIABLES(ctx, varstr, vars=None): |
2893 | if not isinstance(varstr, str): |
2894 | return varstr |
2895 | |
2896 | - import Environment |
2897 | - env = Environment.Environment() |
2898 | + env = ConfigSet.ConfigSet() |
2899 | ret = varstr |
2900 | # substitute on user supplied dict if avaiilable |
2901 | if vars is not None: |
2902 | @@ -345,16 +375,18 @@ def RUN_COMMAND(cmd, |
2903 | return os.WEXITSTATUS(status) |
2904 | if os.WIFSIGNALED(status): |
2905 | return - os.WTERMSIG(status) |
2906 | - Logs.error("Unknown exit reason %d for command: %s" (status, cmd)) |
2907 | + Logs.error("Unknown exit reason %d for command: %s" % (status, cmd)) |
2908 | return -1 |
2909 | |
2910 | |
2911 | def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None): |
2912 | env = LOAD_ENVIRONMENT() |
2913 | if pythonpath is None: |
2914 | - pythonpath = os.path.join(Utils.g_module.blddir, 'python') |
2915 | + pythonpath = os.path.join(Context.g_module.out, 'python') |
2916 | result = 0 |
2917 | for interp in env.python_interpreters: |
2918 | + if not isinstance(interp, str): |
2919 | + interp = ' '.join(interp) |
2920 | for testfile in testfiles: |
2921 | cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile) |
2922 | if extra_env: |
2923 | @@ -374,16 +406,15 @@ try: |
2924 | # Even if hashlib.md5 exists, it may be unusable. |
2925 | # Try to use MD5 function. In FIPS mode this will cause an exception |
2926 | # and we'll get to the replacement code |
2927 | - foo = md5('abcd') |
2928 | + foo = md5(b'abcd') |
2929 | except: |
2930 | try: |
2931 | import md5 |
2932 | # repeat the same check here, mere success of import is not enough. |
2933 | # Try to use MD5 function. In FIPS mode this will cause an exception |
2934 | - foo = md5.md5('abcd') |
2935 | + foo = md5.md5(b'abcd') |
2936 | except: |
2937 | - import Constants |
2938 | - Constants.SIG_NIL = hash('abcd') |
2939 | + Context.SIG_NIL = hash('abcd') |
2940 | class replace_md5(object): |
2941 | def __init__(self): |
2942 | self.val = None |
2943 | @@ -409,20 +440,20 @@ except: |
2944 | def LOAD_ENVIRONMENT(): |
2945 | '''load the configuration environment, allowing access to env vars |
2946 | from new commands''' |
2947 | - import Environment |
2948 | - env = Environment.Environment() |
2949 | + env = ConfigSet.ConfigSet() |
2950 | try: |
2951 | - env.load('.lock-wscript') |
2952 | - env.load(env.blddir + '/c4che/default.cache.py') |
2953 | - except: |
2954 | + p = os.path.join(Context.g_module.out, 'c4che/default'+CACHE_SUFFIX) |
2955 | + env.load(p) |
2956 | + except (OSError, IOError): |
2957 | pass |
2958 | return env |
2959 | |
2960 | |
2961 | def IS_NEWER(bld, file1, file2): |
2962 | '''return True if file1 is newer than file2''' |
2963 | - t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime |
2964 | - t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime |
2965 | + curdir = bld.path.abspath() |
2966 | + t1 = os.stat(os.path.join(curdir, file1)).st_mtime |
2967 | + t2 = os.stat(os.path.join(curdir, file2)).st_mtime |
2968 | return t1 > t2 |
2969 | Build.BuildContext.IS_NEWER = IS_NEWER |
2970 | |
2971 | @@ -432,47 +463,46 @@ def RECURSE(ctx, directory): |
2972 | '''recurse into a directory, relative to the curdir or top level''' |
2973 | try: |
2974 | visited_dirs = ctx.visited_dirs |
2975 | - except: |
2976 | + except AttributeError: |
2977 | visited_dirs = ctx.visited_dirs = set() |
2978 | - d = os.path.join(ctx.curdir, directory) |
2979 | + d = os.path.join(ctx.path.abspath(), directory) |
2980 | if os.path.exists(d): |
2981 | abspath = os.path.abspath(d) |
2982 | else: |
2983 | - abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory)) |
2984 | + abspath = os.path.abspath(os.path.join(Context.g_module.top, directory)) |
2985 | ctxclass = ctx.__class__.__name__ |
2986 | key = ctxclass + ':' + abspath |
2987 | if key in visited_dirs: |
2988 | # already done it |
2989 | return |
2990 | visited_dirs.add(key) |
2991 | - relpath = os_path_relpath(abspath, ctx.curdir) |
2992 | - if ctxclass == 'Handler': |
2993 | - return ctx.sub_options(relpath) |
2994 | - if ctxclass == 'ConfigurationContext': |
2995 | - return ctx.sub_config(relpath) |
2996 | - if ctxclass == 'BuildContext': |
2997 | - return ctx.add_subdirs(relpath) |
2998 | - Logs.error('Unknown RECURSE context class', ctxclass) |
2999 | + relpath = os_path_relpath(abspath, ctx.path.abspath()) |
3000 | + if ctxclass in ['tmp', 'OptionsContext', 'ConfigurationContext', 'BuildContext']: |
3001 | + return ctx.recurse(relpath) |
3002 | + if 'waflib.extras.compat15' in sys.modules: |
3003 | + return ctx.recurse(relpath) |
3004 | + Logs.error('Unknown RECURSE context class: {}'.format(ctxclass)) |
3005 | raise |
3006 | -Options.Handler.RECURSE = RECURSE |
3007 | +Options.OptionsContext.RECURSE = RECURSE |
3008 | Build.BuildContext.RECURSE = RECURSE |
3009 | |
3010 | |
3011 | -def CHECK_MAKEFLAGS(bld): |
3012 | +def CHECK_MAKEFLAGS(options): |
3013 | '''check for MAKEFLAGS environment variable in case we are being |
3014 | called from a Makefile try to honor a few make command line flags''' |
3015 | if not 'WAF_MAKE' in os.environ: |
3016 | return |
3017 | makeflags = os.environ.get('MAKEFLAGS') |
3018 | if makeflags is None: |
3019 | - return |
3020 | + makeflags = "" |
3021 | jobs_set = False |
3022 | + jobs = None |
3023 | # we need to use shlex.split to cope with the escaping of spaces |
3024 | # in makeflags |
3025 | for opt in shlex.split(makeflags): |
3026 | # options can come either as -x or as x |
3027 | if opt[0:2] == 'V=': |
3028 | - Options.options.verbose = Logs.verbose = int(opt[2:]) |
3029 | + options.verbose = Logs.verbose = int(opt[2:]) |
3030 | if Logs.verbose > 0: |
3031 | Logs.zones = ['runner'] |
3032 | if Logs.verbose > 2: |
3033 | @@ -486,22 +516,53 @@ def CHECK_MAKEFLAGS(bld): |
3034 | # this is also how "make test TESTS=testpattern" works, and |
3035 | # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1" |
3036 | loc = opt.find('=') |
3037 | - setattr(Options.options, opt[0:loc], opt[loc+1:]) |
3038 | + setattr(options, opt[0:loc], opt[loc+1:]) |
3039 | elif opt[0] != '-': |
3040 | for v in opt: |
3041 | - if v == 'j': |
3042 | + if re.search(r'j[0-9]*$', v): |
3043 | jobs_set = True |
3044 | + jobs = opt.strip('j') |
3045 | elif v == 'k': |
3046 | - Options.options.keep = True |
3047 | - elif opt == '-j': |
3048 | + options.keep = True |
3049 | + elif re.search(r'-j[0-9]*$', opt): |
3050 | jobs_set = True |
3051 | + jobs = opt.strip('-j') |
3052 | elif opt == '-k': |
3053 | - Options.options.keep = True |
3054 | + options.keep = True |
3055 | if not jobs_set: |
3056 | # default to one job |
3057 | - Options.options.jobs = 1 |
3058 | - |
3059 | -Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS |
3060 | + options.jobs = 1 |
3061 | + elif jobs_set and jobs: |
3062 | + options.jobs = int(jobs) |
3063 | + |
3064 | +waflib_options_parse_cmd_args = Options.OptionsContext.parse_cmd_args |
3065 | +def wafsamba_options_parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False): |
3066 | + (options, commands, envvars) = \ |
3067 | + waflib_options_parse_cmd_args(self, |
3068 | + _args=_args, |
3069 | + cwd=cwd, |
3070 | + allow_unknown=allow_unknown) |
3071 | + CHECK_MAKEFLAGS(options) |
3072 | + if options.jobs == 1: |
3073 | + # |
3074 | + # waflib.Runner.Parallel processes jobs inline if the possible number |
3075 | + # of jobs is just 1. But (at least in waf <= 2.0.12) it still calls |
3076 | + # create a waflib.Runner.Spawner() which creates a single |
3077 | + # waflib.Runner.Consumer() thread that tries to process jobs from the |
3078 | + # queue. |
3079 | + # |
3080 | + # This has strange effects, which are not noticed typically, |
3081 | + # but at least on AIX python has broken threading and fails |
3082 | + # in random ways. |
3083 | + # |
3084 | + # So we just add a dummy Spawner class. |
3085 | + class NoOpSpawner(object): |
3086 | + def __init__(self, master): |
3087 | + return |
3088 | + from waflib import Runner |
3089 | + Runner.Spawner = NoOpSpawner |
3090 | + return options, commands, envvars |
3091 | +Options.OptionsContext.parse_cmd_args = wafsamba_options_parse_cmd_args |
3092 | |
3093 | option_groups = {} |
3094 | |
3095 | @@ -513,7 +574,7 @@ def option_group(opt, name): |
3096 | gr = opt.add_option_group(name) |
3097 | option_groups[name] = gr |
3098 | return gr |
3099 | -Options.Handler.option_group = option_group |
3100 | +Options.OptionsContext.option_group = option_group |
3101 | |
3102 | |
3103 | def save_file(filename, contents, create_dir=False): |
3104 | @@ -542,9 +603,9 @@ def load_file(filename): |
3105 | |
3106 | def reconfigure(ctx): |
3107 | '''rerun configure if necessary''' |
3108 | - import Configure, samba_wildcard, Scripting |
3109 | - if not os.path.exists(".lock-wscript"): |
3110 | - raise Utils.WafError('configure has not been run') |
3111 | + if not os.path.exists(os.environ.get('WAFLOCK', '.lock-wscript')): |
3112 | + raise Errors.WafError('configure has not been run') |
3113 | + import samba_wildcard |
3114 | bld = samba_wildcard.fake_build_environment() |
3115 | Configure.autoconfig = True |
3116 | Scripting.check_configured(bld) |
3117 | @@ -561,7 +622,7 @@ def map_shlib_extension(ctx, name, python=False): |
3118 | if python: |
3119 | return ctx.env.pyext_PATTERN % root1 |
3120 | else: |
3121 | - (root2, ext2) = os.path.splitext(ctx.env.shlib_PATTERN) |
3122 | + (root2, ext2) = os.path.splitext(ctx.env.cshlib_PATTERN) |
3123 | return root1+ext2 |
3124 | Build.BuildContext.map_shlib_extension = map_shlib_extension |
3125 | |
3126 | @@ -583,7 +644,7 @@ def make_libname(ctx, name, nolibprefix=False, version=None, python=False): |
3127 | if python: |
3128 | libname = apply_pattern(name, ctx.env.pyext_PATTERN) |
3129 | else: |
3130 | - libname = apply_pattern(name, ctx.env.shlib_PATTERN) |
3131 | + libname = apply_pattern(name, ctx.env.cshlib_PATTERN) |
3132 | if nolibprefix and libname[0:3] == 'lib': |
3133 | libname = libname[3:] |
3134 | if version: |
3135 | @@ -617,7 +678,7 @@ def get_tgt_list(bld): |
3136 | tgt_list.append(t) |
3137 | return tgt_list |
3138 | |
3139 | -from Constants import WSCRIPT_FILE |
3140 | +from waflib.Context import WSCRIPT_FILE |
3141 | def PROCESS_SEPARATE_RULE(self, rule): |
3142 | ''' cause waf to process additional script based on `rule'. |
3143 | You should have file named wscript_<stage>_rule in the current directory |
3144 | @@ -628,15 +689,21 @@ def PROCESS_SEPARATE_RULE(self, rule): |
3145 | stage = 'configure' |
3146 | elif isinstance(self, Build.BuildContext): |
3147 | stage = 'build' |
3148 | - file_path = os.path.join(self.curdir, WSCRIPT_FILE+'_'+stage+'_'+rule) |
3149 | - txt = load_file(file_path) |
3150 | - if txt: |
3151 | - dc = {'ctx': self} |
3152 | - if getattr(self.__class__, 'pre_recurse', None): |
3153 | - dc = self.pre_recurse(txt, file_path, self.curdir) |
3154 | - exec(compile(txt, file_path, 'exec'), dc) |
3155 | - if getattr(self.__class__, 'post_recurse', None): |
3156 | - dc = self.post_recurse(txt, file_path, self.curdir) |
3157 | + file_path = os.path.join(self.path.abspath(), WSCRIPT_FILE+'_'+stage+'_'+rule) |
3158 | + node = self.root.find_node(file_path) |
3159 | + if node: |
3160 | + try: |
3161 | + cache = self.recurse_cache |
3162 | + except AttributeError: |
3163 | + cache = self.recurse_cache = {} |
3164 | + if node not in cache: |
3165 | + cache[node] = True |
3166 | + self.pre_recurse(node) |
3167 | + try: |
3168 | + function_code = node.read('rU', None) |
3169 | + exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict) |
3170 | + finally: |
3171 | + self.post_recurse(node) |
3172 | |
3173 | Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE |
3174 | ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE |
3175 | @@ -693,4 +760,4 @@ def samba_add_onoff_option(opt, option, help=(), dest=None, default=True, |
3176 | default=default) |
3177 | opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false", |
3178 | dest=dest) |
3179 | -Options.Handler.samba_add_onoff_option = samba_add_onoff_option |
3180 | +Options.OptionsContext.samba_add_onoff_option = samba_add_onoff_option |
3181 | diff --git a/buildtools/wafsamba/samba_version.py b/buildtools/wafsamba/samba_version.py |
3182 | index be26439..f0e7b4d 100644 |
3183 | --- a/buildtools/wafsamba/samba_version.py |
3184 | +++ b/buildtools/wafsamba/samba_version.py |
3185 | @@ -1,5 +1,5 @@ |
3186 | -import os |
3187 | -import Utils |
3188 | +import os, sys |
3189 | +from waflib import Utils, Context |
3190 | import samba_utils |
3191 | from samba_git import find_git |
3192 | |
3193 | @@ -14,7 +14,7 @@ def git_version_summary(path, env=None): |
3194 | environ = dict(os.environ) |
3195 | environ["GIT_DIR"] = '%s/.git' % path |
3196 | environ["GIT_WORK_TREE"] = path |
3197 | - git = Utils.cmd_output(env.GIT + ' show --pretty=format:"%h%n%ct%n%H%n%cd" --stat HEAD', silent=True, env=environ) |
3198 | + git = samba_utils.get_string(Utils.cmd_output(env.GIT + ' show --pretty=format:"%h%n%ct%n%H%n%cd" --stat HEAD', silent=True, env=environ)) |
3199 | |
3200 | lines = git.splitlines() |
3201 | if not lines or len(lines) < 4: |
3202 | @@ -198,7 +198,10 @@ also accepted as dictionary entries here |
3203 | for name in sorted(self.vcs_fields.keys()): |
3204 | string+="#define SAMBA_VERSION_%s " % name |
3205 | value = self.vcs_fields[name] |
3206 | - if isinstance(value, basestring): |
3207 | + string_types = str |
3208 | + if sys.version_info[0] < 3: |
3209 | + string_types = basestring |
3210 | + if isinstance(value, string_types): |
3211 | string += "\"%s\"" % value |
3212 | elif type(value) is int: |
3213 | string += "%d" % value |
3214 | @@ -260,5 +263,5 @@ def load_version(env=None, is_install=True): |
3215 | env = samba_utils.LOAD_ENVIRONMENT() |
3216 | |
3217 | version = samba_version_file("./VERSION", ".", env, is_install=is_install) |
3218 | - Utils.g_module.VERSION = version.STRING |
3219 | + Context.g_module.VERSION = version.STRING |
3220 | return version |
3221 | diff --git a/buildtools/wafsamba/samba_waf18.py b/buildtools/wafsamba/samba_waf18.py |
3222 | new file mode 100644 |
3223 | index 0000000..cc310fb |
3224 | --- /dev/null |
3225 | +++ b/buildtools/wafsamba/samba_waf18.py |
3226 | @@ -0,0 +1,429 @@ |
3227 | +# compatibility layer for building with more recent waf versions |
3228 | + |
3229 | +import os, shlex, sys |
3230 | +from waflib import Build, Configure, Node, Utils, Options, Logs, TaskGen |
3231 | +from waflib import ConfigSet |
3232 | +from waflib.TaskGen import feature, after |
3233 | +from waflib.Configure import conf, ConfigurationContext |
3234 | + |
3235 | +from waflib.Tools.flex import decide_ext |
3236 | + |
3237 | +# This version of flexfun runs in tsk.get_cwd() as opposed to the |
3238 | +# bld.variant_dir: since input paths adjusted against tsk.get_cwd(), we have to |
3239 | +# use tsk.get_cwd() for the work directory as well. |
3240 | +def flexfun(tsk): |
3241 | + env = tsk.env |
3242 | + bld = tsk.generator.bld |
3243 | + def to_list(xx): |
3244 | + if isinstance(xx, str): |
3245 | + return [xx] |
3246 | + return xx |
3247 | + tsk.last_cmd = lst = [] |
3248 | + lst.extend(to_list(env.FLEX)) |
3249 | + lst.extend(to_list(env.FLEXFLAGS)) |
3250 | + inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs] |
3251 | + if env.FLEX_MSYS: |
3252 | + inputs = [x.replace(os.sep, '/') for x in inputs] |
3253 | + lst.extend(inputs) |
3254 | + lst = [x for x in lst if x] |
3255 | + txt = bld.cmd_and_log(lst, cwd=tsk.get_cwd(), env=env.env or None, quiet=0) |
3256 | + tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207 |
3257 | + |
3258 | +TaskGen.declare_chain( |
3259 | + name = 'flex', |
3260 | + rule = flexfun, # issue #854 |
3261 | + ext_in = '.l', |
3262 | + decider = decide_ext, |
3263 | +) |
3264 | + |
3265 | + |
3266 | +for y in (Build.BuildContext, Build.CleanContext, Build.InstallContext, Build.UninstallContext, Build.ListContext): |
3267 | + class tmp(y): |
3268 | + variant = 'default' |
3269 | + |
3270 | +def abspath(self, env=None): |
3271 | + if env and hasattr(self, 'children'): |
3272 | + return self.get_bld().abspath() |
3273 | + return self.old_abspath() |
3274 | +Node.Node.old_abspath = Node.Node.abspath |
3275 | +Node.Node.abspath = abspath |
3276 | + |
3277 | +def bldpath(self, env=None): |
3278 | + return self.abspath() |
3279 | + #return self.path_from(self.ctx.bldnode.parent) |
3280 | +Node.Node.bldpath = bldpath |
3281 | + |
3282 | +def srcpath(self, env=None): |
3283 | + return self.abspath() |
3284 | + #return self.path_from(self.ctx.bldnode.parent) |
3285 | +Node.Node.srcpath = srcpath |
3286 | + |
3287 | +def store_fast(self, filename): |
3288 | + file = open(filename, 'wb') |
3289 | + data = self.get_merged_dict() |
3290 | + try: |
3291 | + Build.cPickle.dump(data, file, -1) |
3292 | + finally: |
3293 | + file.close() |
3294 | +ConfigSet.ConfigSet.store_fast = store_fast |
3295 | + |
3296 | +def load_fast(self, filename): |
3297 | + file = open(filename, 'rb') |
3298 | + try: |
3299 | + data = Build.cPickle.load(file) |
3300 | + finally: |
3301 | + file.close() |
3302 | + self.table.update(data) |
3303 | +ConfigSet.ConfigSet.load_fast = load_fast |
3304 | + |
3305 | +@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes') |
3306 | +@after('propagate_uselib_vars', 'process_source') |
3307 | +def apply_incpaths(self): |
3308 | + lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES']) |
3309 | + self.includes_nodes = lst |
3310 | + cwdx = getattr(self.bld, 'cwdx', self.bld.bldnode) |
3311 | + self.env['INCPATHS'] = [x.path_from(cwdx) for x in lst] |
3312 | + |
3313 | +@conf |
3314 | +def define(self, key, val, quote=True, comment=None): |
3315 | + assert key and isinstance(key, str) |
3316 | + |
3317 | + if val is None: |
3318 | + val = () |
3319 | + elif isinstance(val, bool): |
3320 | + val = int(val) |
3321 | + |
3322 | + # waf 1.5 |
3323 | + self.env[key] = val |
3324 | + |
3325 | + if isinstance(val, int) or isinstance(val, float): |
3326 | + s = '%s=%s' |
3327 | + else: |
3328 | + s = quote and '%s="%s"' or '%s=%s' |
3329 | + app = s % (key, str(val)) |
3330 | + |
3331 | + ban = key + '=' |
3332 | + lst = self.env.DEFINES |
3333 | + for x in lst: |
3334 | + if x.startswith(ban): |
3335 | + lst[lst.index(x)] = app |
3336 | + break |
3337 | + else: |
3338 | + self.env.append_value('DEFINES', app) |
3339 | + |
3340 | + self.env.append_unique('define_key', key) |
3341 | + |
3342 | +# compat15 removes this but we want to keep it |
3343 | +@conf |
3344 | +def undefine(self, key, from_env=True, comment=None): |
3345 | + assert key and isinstance(key, str) |
3346 | + |
3347 | + ban = key + '=' |
3348 | + self.env.DEFINES = [x for x in self.env.DEFINES if not x.startswith(ban)] |
3349 | + self.env.append_unique('define_key', key) |
3350 | + # waf 1.5 |
3351 | + if from_env: |
3352 | + self.env[key] = () |
3353 | + |
3354 | +class ConfigurationContext(Configure.ConfigurationContext): |
3355 | + def init_dirs(self): |
3356 | + self.setenv('default') |
3357 | + self.env.merge_config_header = True |
3358 | + return super(ConfigurationContext, self).init_dirs() |
3359 | + |
3360 | +def find_program_samba(self, *k, **kw): |
3361 | + kw['mandatory'] = False |
3362 | + ret = self.find_program_old(*k, **kw) |
3363 | + return ret |
3364 | +Configure.ConfigurationContext.find_program_old = Configure.ConfigurationContext.find_program |
3365 | +Configure.ConfigurationContext.find_program = find_program_samba |
3366 | + |
3367 | +Build.BuildContext.ENFORCE_GROUP_ORDERING = Utils.nada |
3368 | +Build.BuildContext.AUTOCLEANUP_STALE_FILES = Utils.nada |
3369 | + |
3370 | +@conf |
3371 | +def check(self, *k, **kw): |
3372 | + '''Override the waf defaults to inject --with-directory options''' |
3373 | + |
3374 | + # match the configuration test with speficic options, for example: |
3375 | + # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv" |
3376 | + self.validate_c(kw) |
3377 | + |
3378 | + additional_dirs = [] |
3379 | + if 'msg' in kw: |
3380 | + msg = kw['msg'] |
3381 | + for x in Options.OptionsContext.parser.parser.option_list: |
3382 | + if getattr(x, 'match', None) and msg in x.match: |
3383 | + d = getattr(Options.options, x.dest, '') |
3384 | + if d: |
3385 | + additional_dirs.append(d) |
3386 | + |
3387 | + # we add the additional dirs twice: once for the test data, and again if the compilation test suceeds below |
3388 | + def add_options_dir(dirs, env): |
3389 | + for x in dirs: |
3390 | + if not x in env.CPPPATH: |
3391 | + env.CPPPATH = [os.path.join(x, 'include')] + env.CPPPATH |
3392 | + if not x in env.LIBPATH: |
3393 | + env.LIBPATH = [os.path.join(x, 'lib')] + env.LIBPATH |
3394 | + |
3395 | + add_options_dir(additional_dirs, kw['env']) |
3396 | + |
3397 | + self.start_msg(kw['msg'], **kw) |
3398 | + ret = None |
3399 | + try: |
3400 | + ret = self.run_build(*k, **kw) |
3401 | + except self.errors.ConfigurationError: |
3402 | + self.end_msg(kw['errmsg'], 'YELLOW', **kw) |
3403 | + if Logs.verbose > 1: |
3404 | + raise |
3405 | + else: |
3406 | + self.fatal('The configuration failed') |
3407 | + else: |
3408 | + kw['success'] = ret |
3409 | + # success! time for brandy |
3410 | + add_options_dir(additional_dirs, self.env) |
3411 | + |
3412 | + ret = self.post_check(*k, **kw) |
3413 | + if not ret: |
3414 | + self.end_msg(kw['errmsg'], 'YELLOW', **kw) |
3415 | + self.fatal('The configuration failed %r' % ret) |
3416 | + else: |
3417 | + self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) |
3418 | + return ret |
3419 | + |
3420 | +@conf |
3421 | +def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None): |
3422 | + '''see if the platform supports building libraries''' |
3423 | + |
3424 | + if msg is None: |
3425 | + if rpath: |
3426 | + msg = "rpath library support" |
3427 | + else: |
3428 | + msg = "building library support" |
3429 | + |
3430 | + def build(bld): |
3431 | + lib_node = bld.srcnode.make_node('libdir/liblc1.c') |
3432 | + lib_node.parent.mkdir() |
3433 | + lib_node.write('int lib_func(void) { return 42; }\n', 'w') |
3434 | + main_node = bld.srcnode.make_node('main.c') |
3435 | + main_node.write('int main(void) {return !(lib_func() == 42);}', 'w') |
3436 | + linkflags = [] |
3437 | + if version_script: |
3438 | + script = bld.srcnode.make_node('ldscript') |
3439 | + script.write('TEST_1.0A2 { global: *; };\n', 'w') |
3440 | + linkflags.append('-Wl,--version-script=%s' % script.abspath()) |
3441 | + bld(features='c cshlib', source=lib_node, target='lib1', linkflags=linkflags, name='lib1') |
3442 | + o = bld(features='c cprogram', source=main_node, target='prog1', uselib_local='lib1') |
3443 | + if rpath: |
3444 | + o.rpath = [lib_node.parent.abspath()] |
3445 | + def run_app(self): |
3446 | + args = conf.SAMBA_CROSS_ARGS(msg=msg) |
3447 | + env = dict(os.environ) |
3448 | + env['LD_LIBRARY_PATH'] = self.inputs[0].parent.abspath() + os.pathsep + env.get('LD_LIBRARY_PATH', '') |
3449 | + self.generator.bld.cmd_and_log([self.inputs[0].abspath()] + args, env=env) |
3450 | + o.post() |
3451 | + bld(rule=run_app, source=o.link_task.outputs[0]) |
3452 | + |
3453 | + # ok, so it builds |
3454 | + try: |
3455 | + conf.check(build_fun=build, msg='Checking for %s' % msg) |
3456 | + except conf.errors.ConfigurationError: |
3457 | + return False |
3458 | + return True |
3459 | + |
3460 | +@conf |
3461 | +def CHECK_NEED_LC(conf, msg): |
3462 | + '''check if we need -lc''' |
3463 | + def build(bld): |
3464 | + lib_node = bld.srcnode.make_node('libdir/liblc1.c') |
3465 | + lib_node.parent.mkdir() |
3466 | + lib_node.write('#include <stdio.h>\nint lib_func(void) { FILE *f = fopen("foo", "r");}\n', 'w') |
3467 | + bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc') |
3468 | + try: |
3469 | + conf.check(build_fun=build, msg=msg, okmsg='-lc is unnecessary', errmsg='-lc is necessary') |
3470 | + except conf.errors.ConfigurationError: |
3471 | + return False |
3472 | + return True |
3473 | + |
3474 | +# already implemented on "waf -v" |
3475 | +def order(bld, tgt_list): |
3476 | + return True |
3477 | +Build.BuildContext.check_group_ordering = order |
3478 | + |
3479 | +@conf |
3480 | +def CHECK_CFG(self, *k, **kw): |
3481 | + if 'args' in kw: |
3482 | + kw['args'] = shlex.split(kw['args']) |
3483 | + if not 'mandatory' in kw: |
3484 | + kw['mandatory'] = False |
3485 | + kw['global_define'] = True |
3486 | + return self.check_cfg(*k, **kw) |
3487 | + |
3488 | +def cmd_output(cmd, **kw): |
3489 | + |
3490 | + silent = False |
3491 | + if 'silent' in kw: |
3492 | + silent = kw['silent'] |
3493 | + del(kw['silent']) |
3494 | + |
3495 | + if 'e' in kw: |
3496 | + tmp = kw['e'] |
3497 | + del(kw['e']) |
3498 | + kw['env'] = tmp |
3499 | + |
3500 | + kw['shell'] = isinstance(cmd, str) |
3501 | + kw['stdout'] = Utils.subprocess.PIPE |
3502 | + if silent: |
3503 | + kw['stderr'] = Utils.subprocess.PIPE |
3504 | + |
3505 | + try: |
3506 | + p = Utils.subprocess.Popen(cmd, **kw) |
3507 | + output = p.communicate()[0] |
3508 | + except OSError as e: |
3509 | + raise ValueError(str(e)) |
3510 | + |
3511 | + if p.returncode: |
3512 | + if not silent: |
3513 | + msg = "command execution failed: %s -> %r" % (cmd, str(output)) |
3514 | + raise ValueError(msg) |
3515 | + output = '' |
3516 | + return output |
3517 | +Utils.cmd_output = cmd_output |
3518 | + |
3519 | + |
3520 | +@TaskGen.feature('c', 'cxx', 'd') |
3521 | +@TaskGen.before('apply_incpaths', 'propagate_uselib_vars') |
3522 | +@TaskGen.after('apply_link', 'process_source') |
3523 | +def apply_uselib_local(self): |
3524 | + """ |
3525 | + process the uselib_local attribute |
3526 | + execute after apply_link because of the execution order set on 'link_task' |
3527 | + """ |
3528 | + env = self.env |
3529 | + from waflib.Tools.ccroot import stlink_task |
3530 | + |
3531 | + # 1. the case of the libs defined in the project (visit ancestors first) |
3532 | + # the ancestors external libraries (uselib) will be prepended |
3533 | + self.uselib = self.to_list(getattr(self, 'uselib', [])) |
3534 | + self.includes = self.to_list(getattr(self, 'includes', [])) |
3535 | + names = self.to_list(getattr(self, 'uselib_local', [])) |
3536 | + get = self.bld.get_tgen_by_name |
3537 | + seen = set() |
3538 | + seen_uselib = set() |
3539 | + tmp = Utils.deque(names) # consume a copy of the list of names |
3540 | + if tmp: |
3541 | + if Logs.verbose: |
3542 | + Logs.warn('compat: "uselib_local" is deprecated, replace by "use"') |
3543 | + while tmp: |
3544 | + lib_name = tmp.popleft() |
3545 | + # visit dependencies only once |
3546 | + if lib_name in seen: |
3547 | + continue |
3548 | + |
3549 | + y = get(lib_name) |
3550 | + y.post() |
3551 | + seen.add(lib_name) |
3552 | + |
3553 | + # object has ancestors to process (shared libraries): add them to the end of the list |
3554 | + if getattr(y, 'uselib_local', None): |
3555 | + for x in self.to_list(getattr(y, 'uselib_local', [])): |
3556 | + obj = get(x) |
3557 | + obj.post() |
3558 | + if getattr(obj, 'link_task', None): |
3559 | + if not isinstance(obj.link_task, stlink_task): |
3560 | + tmp.append(x) |
3561 | + |
3562 | + # link task and flags |
3563 | + if getattr(y, 'link_task', None): |
3564 | + |
3565 | + link_name = y.target[y.target.rfind(os.sep) + 1:] |
3566 | + if isinstance(y.link_task, stlink_task): |
3567 | + env.append_value('STLIB', [link_name]) |
3568 | + else: |
3569 | + # some linkers can link against programs |
3570 | + env.append_value('LIB', [link_name]) |
3571 | + |
3572 | + # the order |
3573 | + self.link_task.set_run_after(y.link_task) |
3574 | + |
3575 | + # for the recompilation |
3576 | + self.link_task.dep_nodes += y.link_task.outputs |
3577 | + |
3578 | + # add the link path too |
3579 | + tmp_path = y.link_task.outputs[0].parent.bldpath() |
3580 | + if not tmp_path in env['LIBPATH']: |
3581 | + env.prepend_value('LIBPATH', [tmp_path]) |
3582 | + |
3583 | + # add ancestors uselib too - but only propagate those that have no staticlib defined |
3584 | + for v in self.to_list(getattr(y, 'uselib', [])): |
3585 | + if v not in seen_uselib: |
3586 | + seen_uselib.add(v) |
3587 | + if not env['STLIB_' + v]: |
3588 | + if not v in self.uselib: |
3589 | + self.uselib.insert(0, v) |
3590 | + |
3591 | + # if the library task generator provides 'export_includes', add to the include path |
3592 | + # the export_includes must be a list of paths relative to the other library |
3593 | + if getattr(y, 'export_includes', None): |
3594 | + self.includes.extend(y.to_incnodes(y.export_includes)) |
3595 | + |
3596 | +@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib') |
3597 | +@TaskGen.after('apply_link') |
3598 | +def apply_objdeps(self): |
3599 | + "add the .o files produced by some other object files in the same manner as uselib_local" |
3600 | + names = getattr(self, 'add_objects', []) |
3601 | + if not names: |
3602 | + return |
3603 | + names = self.to_list(names) |
3604 | + |
3605 | + get = self.bld.get_tgen_by_name |
3606 | + seen = [] |
3607 | + while names: |
3608 | + x = names[0] |
3609 | + |
3610 | + # visit dependencies only once |
3611 | + if x in seen: |
3612 | + names = names[1:] |
3613 | + continue |
3614 | + |
3615 | + # object does not exist ? |
3616 | + y = get(x) |
3617 | + |
3618 | + # object has ancestors to process first ? update the list of names |
3619 | + if getattr(y, 'add_objects', None): |
3620 | + added = 0 |
3621 | + lst = y.to_list(y.add_objects) |
3622 | + lst.reverse() |
3623 | + for u in lst: |
3624 | + if u in seen: |
3625 | + continue |
3626 | + added = 1 |
3627 | + names = [u]+names |
3628 | + if added: |
3629 | + continue # list of names modified, loop |
3630 | + |
3631 | + # safe to process the current object |
3632 | + y.post() |
3633 | + seen.append(x) |
3634 | + |
3635 | + for t in getattr(y, 'compiled_tasks', []): |
3636 | + self.link_task.inputs.extend(t.outputs) |
3637 | + |
3638 | +@TaskGen.after('apply_link') |
3639 | +def process_obj_files(self): |
3640 | + if not hasattr(self, 'obj_files'): |
3641 | + return |
3642 | + for x in self.obj_files: |
3643 | + node = self.path.find_resource(x) |
3644 | + self.link_task.inputs.append(node) |
3645 | + |
3646 | +@TaskGen.taskgen_method |
3647 | +def add_obj_file(self, file): |
3648 | + """Small example on how to link object files as if they were source |
3649 | + obj = bld.create_obj('cc') |
3650 | + obj.add_obj_file('foo.o')""" |
3651 | + if not hasattr(self, 'obj_files'): |
3652 | + self.obj_files = [] |
3653 | + if not 'process_obj_files' in self.meths: |
3654 | + self.meths.append('process_obj_files') |
3655 | + self.obj_files.append(file) |
3656 | diff --git a/buildtools/wafsamba/samba_wildcard.py b/buildtools/wafsamba/samba_wildcard.py |
3657 | index ed3e0c2..6173ce8 100644 |
3658 | --- a/buildtools/wafsamba/samba_wildcard.py |
3659 | +++ b/buildtools/wafsamba/samba_wildcard.py |
3660 | @@ -1,15 +1,15 @@ |
3661 | # based on playground/evil in the waf svn tree |
3662 | |
3663 | import os, datetime, fnmatch |
3664 | -import Scripting, Utils, Options, Logs, Environment |
3665 | -from Constants import SRCDIR, BLDDIR |
3666 | +from waflib import Scripting, Utils, Options, Logs, Errors |
3667 | +from waflib import ConfigSet, Context |
3668 | from samba_utils import LOCAL_CACHE, os_path_relpath |
3669 | |
3670 | def run_task(t, k): |
3671 | '''run a single build task''' |
3672 | ret = t.run() |
3673 | if ret: |
3674 | - raise Utils.WafError("Failed to build %s: %u" % (k, ret)) |
3675 | + raise Errors.WafError("Failed to build %s: %u" % (k, ret)) |
3676 | |
3677 | |
3678 | def run_named_build_task(cmd): |
3679 | @@ -45,7 +45,7 @@ def run_named_build_task(cmd): |
3680 | |
3681 | |
3682 | if not found: |
3683 | - raise Utils.WafError("Unable to find build target matching %s" % cmd) |
3684 | + raise Errors.WafError("Unable to find build target matching %s" % cmd) |
3685 | |
3686 | |
3687 | def rewrite_compile_targets(): |
3688 | @@ -125,7 +125,7 @@ def wildcard_main(missing_cmd_fn): |
3689 | def fake_build_environment(info=True, flush=False): |
3690 | """create all the tasks for the project, but do not run the build |
3691 | return the build context in use""" |
3692 | - bld = getattr(Utils.g_module, 'build_context', Utils.Context)() |
3693 | + bld = getattr(Context.g_module, 'build_context', Utils.Context)() |
3694 | bld = Scripting.check_configured(bld) |
3695 | |
3696 | Options.commands['install'] = False |
3697 | @@ -134,16 +134,15 @@ def fake_build_environment(info=True, flush=False): |
3698 | bld.is_install = 0 # False |
3699 | |
3700 | try: |
3701 | - proj = Environment.Environment(Options.lockfile) |
3702 | + proj = ConfigSet.ConfigSet(Options.lockfile) |
3703 | except IOError: |
3704 | - raise Utils.WafError("Project not configured (run 'waf configure' first)") |
3705 | + raise Errors.WafError("Project not configured (run 'waf configure' first)") |
3706 | |
3707 | - bld.load_dirs(proj[SRCDIR], proj[BLDDIR]) |
3708 | bld.load_envs() |
3709 | |
3710 | if info: |
3711 | Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath()) |
3712 | - bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]]) |
3713 | + bld.add_subdirs([os.path.split(Context.g_module.root_path)[0]]) |
3714 | |
3715 | bld.pre_build() |
3716 | if flush: |
3717 | diff --git a/buildtools/wafsamba/stale_files.py b/buildtools/wafsamba/stale_files.py |
3718 | index 2dd08e1..175f573 100644 |
3719 | --- a/buildtools/wafsamba/stale_files.py |
3720 | +++ b/buildtools/wafsamba/stale_files.py |
3721 | @@ -14,7 +14,9 @@ nodes/tasks, in which case the method will have to be modified |
3722 | to exclude some folders for example. |
3723 | """ |
3724 | |
3725 | -import Logs, Build, os, samba_utils, Options, Utils |
3726 | +from waflib import Logs, Build, Options, Utils, Errors |
3727 | +import os |
3728 | +from wafsamba import samba_utils |
3729 | from Runner import Parallel |
3730 | |
3731 | old_refill_task_list = Parallel.refill_task_list |
3732 | @@ -46,7 +48,7 @@ def replace_refill_task_list(self): |
3733 | |
3734 | # paranoia |
3735 | if bin_base[-4:] != '/bin': |
3736 | - raise Utils.WafError("Invalid bin base: %s" % bin_base) |
3737 | + raise Errors.WafError("Invalid bin base: %s" % bin_base) |
3738 | |
3739 | # obtain the expected list of files |
3740 | expected = [] |
3741 | diff --git a/buildtools/wafsamba/symbols.py b/buildtools/wafsamba/symbols.py |
3742 | index 7ff4bac..3eca3d4 100644 |
3743 | --- a/buildtools/wafsamba/symbols.py |
3744 | +++ b/buildtools/wafsamba/symbols.py |
3745 | @@ -2,8 +2,8 @@ |
3746 | # using nm, producing a set of exposed defined/undefined symbols |
3747 | |
3748 | import os, re, subprocess |
3749 | -import Utils, Build, Options, Logs |
3750 | -from Logs import debug |
3751 | +from waflib import Utils, Build, Options, Logs, Errors |
3752 | +from waflib.Logs import debug |
3753 | from samba_utils import TO_LIST, LOCAL_CACHE, get_tgt_list, os_path_relpath |
3754 | |
3755 | # these are the data structures used in symbols.py: |
3756 | @@ -59,12 +59,12 @@ def symbols_extract(bld, objfiles, dynamic=False): |
3757 | |
3758 | for line in nmpipe: |
3759 | line = line.strip() |
3760 | - if line.endswith(':'): |
3761 | + if line.endswith(b':'): |
3762 | filename = line[:-1] |
3763 | ret[filename] = { "PUBLIC": set(), "UNDEFINED" : set() } |
3764 | continue |
3765 | - cols = line.split(" ") |
3766 | - if cols == ['']: |
3767 | + cols = line.split(b" ") |
3768 | + if cols == [b'']: |
3769 | continue |
3770 | # see if the line starts with an address |
3771 | if len(cols) == 3: |
3772 | @@ -73,10 +73,10 @@ def symbols_extract(bld, objfiles, dynamic=False): |
3773 | else: |
3774 | symbol_type = cols[0] |
3775 | symbol = cols[1] |
3776 | - if symbol_type in "BDGTRVWSi": |
3777 | + if symbol_type in b"BDGTRVWSi": |
3778 | # its a public symbol |
3779 | ret[filename]["PUBLIC"].add(symbol) |
3780 | - elif symbol_type in "U": |
3781 | + elif symbol_type in b"U": |
3782 | ret[filename]["UNDEFINED"].add(symbol) |
3783 | |
3784 | # add to the cache |
3785 | @@ -106,10 +106,10 @@ def find_ldd_path(bld, libname, binary): |
3786 | lddpipe = subprocess.Popen(['ldd', binary], stdout=subprocess.PIPE).stdout |
3787 | for line in lddpipe: |
3788 | line = line.strip() |
3789 | - cols = line.split(" ") |
3790 | - if len(cols) < 3 or cols[1] != "=>": |
3791 | + cols = line.split(b" ") |
3792 | + if len(cols) < 3 or cols[1] != b"=>": |
3793 | continue |
3794 | - if cols[0].startswith("libc."): |
3795 | + if cols[0].startswith(b"libc."): |
3796 | # save this one too |
3797 | bld.env.libc_path = cols[2] |
3798 | if cols[0].startswith(libname): |
3799 | @@ -119,8 +119,9 @@ def find_ldd_path(bld, libname, binary): |
3800 | |
3801 | |
3802 | # some regular expressions for parsing readelf output |
3803 | -re_sharedlib = re.compile('Shared library: \[(.*)\]') |
3804 | -re_rpath = re.compile('Library rpath: \[(.*)\]') |
3805 | +re_sharedlib = re.compile(b'Shared library: \[(.*)\]') |
3806 | +# output from readelf could be `Library rpath` or `Library runpath` |
3807 | +re_rpath = re.compile(b'Library (rpath|runpath): \[(.*)\]') |
3808 | |
3809 | def get_libs(bld, binname): |
3810 | '''find the list of linked libraries for any binary or library |
3811 | @@ -146,7 +147,8 @@ def get_libs(bld, binname): |
3812 | libs.add(m.group(1)) |
3813 | m = re_rpath.search(line) |
3814 | if m: |
3815 | - rpath.extend(m.group(1).split(":")) |
3816 | + # output from Popen is always bytestr even in py3 |
3817 | + rpath.extend(m.group(2).split(b":")) |
3818 | |
3819 | ret = set() |
3820 | for lib in libs: |
3821 | @@ -410,7 +412,7 @@ def check_library_deps(bld, t): |
3822 | if dep2 == name and t.in_library != t2.in_library: |
3823 | Logs.warn("WARNING: mutual dependency %s <=> %s" % (name, real_name(t2.sname))) |
3824 | Logs.warn("Libraries should match. %s != %s" % (t.in_library, t2.in_library)) |
3825 | - # raise Utils.WafError("illegal mutual dependency") |
3826 | + # raise Errors.WafError("illegal mutual dependency") |
3827 | |
3828 | |
3829 | def check_syslib_collisions(bld, tgt_list): |
3830 | @@ -430,7 +432,7 @@ def check_syslib_collisions(bld, tgt_list): |
3831 | Logs.error("ERROR: Target '%s' has symbols '%s' which is also in syslib '%s'" % (t.sname, common, lib)) |
3832 | has_error = True |
3833 | if has_error: |
3834 | - raise Utils.WafError("symbols in common with system libraries") |
3835 | + raise Errors.WafError("symbols in common with system libraries") |
3836 | |
3837 | |
3838 | def check_dependencies(bld, t): |
3839 | @@ -546,7 +548,7 @@ def symbols_whyneeded(task): |
3840 | |
3841 | why = Options.options.WHYNEEDED.split(":") |
3842 | if len(why) != 2: |
3843 | - raise Utils.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY") |
3844 | + raise Errors.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY") |
3845 | target = why[0] |
3846 | subsystem = why[1] |
3847 | |
3848 | @@ -579,7 +581,7 @@ def report_duplicate(bld, binname, sym, libs, fail_on_error): |
3849 | else: |
3850 | libnames.append(lib) |
3851 | if fail_on_error: |
3852 | - raise Utils.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames)) |
3853 | + raise Errors.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames)) |
3854 | else: |
3855 | print("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames)) |
3856 | |
3857 | diff --git a/buildtools/wafsamba/test_duplicate_symbol.sh b/buildtools/wafsamba/test_duplicate_symbol.sh |
3858 | index 89a4027..46f44a6 100755 |
3859 | --- a/buildtools/wafsamba/test_duplicate_symbol.sh |
3860 | +++ b/buildtools/wafsamba/test_duplicate_symbol.sh |
3861 | @@ -5,7 +5,7 @@ |
3862 | |
3863 | subunit_start_test duplicate_symbols |
3864 | |
3865 | -if ./buildtools/bin/waf build --dup-symbol-check; then |
3866 | +if $PYTHON ./buildtools/bin/waf build --dup-symbol-check; then |
3867 | subunit_pass_test duplicate_symbols |
3868 | else |
3869 | echo | subunit_fail_test duplicate_symbols |
3870 | diff --git a/buildtools/wafsamba/tests/test_abi.py b/buildtools/wafsamba/tests/test_abi.py |
3871 | index bba78c1..d6bdb04 100644 |
3872 | --- a/buildtools/wafsamba/tests/test_abi.py |
3873 | +++ b/buildtools/wafsamba/tests/test_abi.py |
3874 | @@ -21,7 +21,7 @@ from wafsamba.samba_abi import ( |
3875 | normalise_signature, |
3876 | ) |
3877 | |
3878 | -from cStringIO import StringIO |
3879 | +from samba.compat import StringIO |
3880 | |
3881 | |
3882 | class NormaliseSignatureTests(TestCase): |
3883 | @@ -66,6 +66,10 @@ class WriteVscriptTests(TestCase): |
3884 | 1.0 { |
3885 | \tglobal: |
3886 | \t\t*; |
3887 | +\tlocal: |
3888 | +\t\t_end; |
3889 | +\t\t__bss_start; |
3890 | +\t\t_edata; |
3891 | }; |
3892 | """) |
3893 | |
3894 | @@ -84,6 +88,10 @@ MYLIB_0.1 { |
3895 | 1.0 { |
3896 | \tglobal: |
3897 | \t\t*; |
3898 | +\tlocal: |
3899 | +\t\t_end; |
3900 | +\t\t__bss_start; |
3901 | +\t\t_edata; |
3902 | }; |
3903 | """) |
3904 | |
3905 | @@ -99,6 +107,9 @@ MYLIB_0.1 { |
3906 | \t\t*; |
3907 | \tlocal: |
3908 | \t\texc_*; |
3909 | +\t\t_end; |
3910 | +\t\t__bss_start; |
3911 | +\t\t_edata; |
3912 | }; |
3913 | """) |
3914 | |
3915 | @@ -115,6 +126,9 @@ MYLIB_0.1 { |
3916 | \t\tpub_*; |
3917 | \tlocal: |
3918 | \t\texc_*; |
3919 | +\t\t_end; |
3920 | +\t\t__bss_start; |
3921 | +\t\t_edata; |
3922 | \t\t*; |
3923 | }; |
3924 | """) |
3925 | diff --git a/buildtools/wafsamba/tru64cc.py b/buildtools/wafsamba/tru64cc.py |
3926 | deleted file mode 100644 |
3927 | index e1bbb1d..0000000 |
3928 | --- a/buildtools/wafsamba/tru64cc.py |
3929 | +++ /dev/null |
3930 | @@ -1,77 +0,0 @@ |
3931 | - |
3932 | -# compiler definition for tru64/OSF1 cc compiler |
3933 | -# based on suncc.py from waf |
3934 | - |
3935 | -import os, optparse |
3936 | -import Utils, Options, Configure |
3937 | -import ccroot, ar |
3938 | -from Configure import conftest |
3939 | - |
3940 | -from compiler_cc import c_compiler |
3941 | - |
3942 | -c_compiler['osf1V'] = ['gcc', 'tru64cc'] |
3943 | - |
3944 | -@conftest |
3945 | -def find_tru64cc(conf): |
3946 | - v = conf.env |
3947 | - cc = None |
3948 | - if v['CC']: cc = v['CC'] |
3949 | - elif 'CC' in conf.environ: cc = conf.environ['CC'] |
3950 | - if not cc: cc = conf.find_program('cc', var='CC') |
3951 | - if not cc: conf.fatal('tru64cc was not found') |
3952 | - cc = conf.cmd_to_list(cc) |
3953 | - |
3954 | - try: |
3955 | - if not Utils.cmd_output(cc + ['-V']): |
3956 | - conf.fatal('tru64cc %r was not found' % cc) |
3957 | - except ValueError: |
3958 | - conf.fatal('tru64cc -V could not be executed') |
3959 | - |
3960 | - v['CC'] = cc |
3961 | - v['CC_NAME'] = 'tru64' |
3962 | - |
3963 | -@conftest |
3964 | -def tru64cc_common_flags(conf): |
3965 | - v = conf.env |
3966 | - |
3967 | - v['CC_SRC_F'] = '' |
3968 | - v['CC_TGT_F'] = ['-c', '-o', ''] |
3969 | - v['CPPPATH_ST'] = '-I%s' # template for adding include paths |
3970 | - |
3971 | - # linker |
3972 | - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] |
3973 | - v['CCLNK_SRC_F'] = '' |
3974 | - v['CCLNK_TGT_F'] = ['-o', ''] |
3975 | - |
3976 | - v['LIB_ST'] = '-l%s' # template for adding libs |
3977 | - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths |
3978 | - v['STATICLIB_ST'] = '-l%s' |
3979 | - v['STATICLIBPATH_ST'] = '-L%s' |
3980 | - v['CCDEFINES_ST'] = '-D%s' |
3981 | - |
3982 | -# v['SONAME_ST'] = '-Wl,-h -Wl,%s' |
3983 | -# v['SHLIB_MARKER'] = '-Bdynamic' |
3984 | -# v['STATICLIB_MARKER'] = '-Bstatic' |
3985 | - |
3986 | - # program |
3987 | - v['program_PATTERN'] = '%s' |
3988 | - |
3989 | - # shared library |
3990 | -# v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] |
3991 | - v['shlib_LINKFLAGS'] = ['-shared'] |
3992 | - v['shlib_PATTERN'] = 'lib%s.so' |
3993 | - |
3994 | - # static lib |
3995 | -# v['staticlib_LINKFLAGS'] = ['-Bstatic'] |
3996 | -# v['staticlib_PATTERN'] = 'lib%s.a' |
3997 | - |
3998 | -detect = ''' |
3999 | -find_tru64cc |
4000 | -find_cpp |
4001 | -find_ar |
4002 | -tru64cc_common_flags |
4003 | -cc_load_tools |
4004 | -cc_add_flags |
4005 | -link_add_flags |
4006 | -''' |
4007 | - |
4008 | diff --git a/buildtools/wafsamba/wafsamba.py b/buildtools/wafsamba/wafsamba.py |
4009 | index 12d5421..1b98e1c 100644 |
4010 | --- a/buildtools/wafsamba/wafsamba.py |
4011 | +++ b/buildtools/wafsamba/wafsamba.py |
4012 | @@ -1,15 +1,16 @@ |
4013 | # a waf tool to add autoconf-like macros to the configure section |
4014 | # and for SAMBA_ macros for building libraries, binaries etc |
4015 | |
4016 | -import Build, os, sys, Options, Task, Utils, cc, TaskGen, fnmatch, re, shutil, Logs, Constants |
4017 | -from Configure import conf |
4018 | -from Logs import debug |
4019 | +import os, sys, re, shutil, fnmatch |
4020 | +from waflib import Build, Options, Task, Utils, TaskGen, Logs, Context, Errors |
4021 | +from waflib.Configure import conf |
4022 | +from waflib.Logs import debug |
4023 | from samba_utils import SUBST_VARS_RECURSIVE |
4024 | TaskGen.task_gen.apply_verif = Utils.nada |
4025 | |
4026 | # bring in the other samba modules |
4027 | -from samba_optimisation import * |
4028 | from samba_utils import * |
4029 | +from samba_utils import symlink |
4030 | from samba_version import * |
4031 | from samba_autoconf import * |
4032 | from samba_patterns import * |
4033 | @@ -25,27 +26,19 @@ import samba_install |
4034 | import samba_conftests |
4035 | import samba_abi |
4036 | import samba_headers |
4037 | -import tru64cc |
4038 | -import irixcc |
4039 | -import hpuxcc |
4040 | import generic_cc |
4041 | import samba_dist |
4042 | import samba_wildcard |
4043 | -import stale_files |
4044 | import symbols |
4045 | import pkgconfig |
4046 | import configure_file |
4047 | - |
4048 | -# some systems have broken threading in python |
4049 | -if os.environ.get('WAF_NOTHREADS') == '1': |
4050 | - import nothreads |
4051 | +import samba_waf18 |
4052 | |
4053 | LIB_PATH="shared" |
4054 | |
4055 | os.environ['PYTHONUNBUFFERED'] = '1' |
4056 | |
4057 | - |
4058 | -if Constants.HEXVERSION < 0x105019: |
4059 | +if Context.HEXVERSION not in (0x2000800,): |
4060 | Logs.error(''' |
4061 | Please use the version of waf that comes with Samba, not |
4062 | a system installed version. See http://wiki.samba.org/index.php/Waf |
4063 | @@ -55,26 +48,25 @@ Alternatively, please run ./configure and make as usual. That will |
4064 | call the right version of waf.''') |
4065 | sys.exit(1) |
4066 | |
4067 | - |
4068 | @conf |
4069 | def SAMBA_BUILD_ENV(conf): |
4070 | '''create the samba build environment''' |
4071 | - conf.env.BUILD_DIRECTORY = conf.blddir |
4072 | - mkdir_p(os.path.join(conf.blddir, LIB_PATH)) |
4073 | - mkdir_p(os.path.join(conf.blddir, LIB_PATH, "private")) |
4074 | - mkdir_p(os.path.join(conf.blddir, "modules")) |
4075 | - mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc')) |
4076 | + conf.env.BUILD_DIRECTORY = conf.bldnode.abspath() |
4077 | + mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH)) |
4078 | + mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH, "private")) |
4079 | + mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, "modules")) |
4080 | + mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'python/samba/dcerpc')) |
4081 | # this allows all of the bin/shared and bin/python targets |
4082 | # to be expressed in terms of build directory paths |
4083 | - mkdir_p(os.path.join(conf.blddir, 'default')) |
4084 | - for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('python', 'python_modules')]: |
4085 | - link_target = os.path.join(conf.blddir, 'default/' + target) |
4086 | + mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'default')) |
4087 | + for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('python', 'python')]: |
4088 | + link_target = os.path.join(conf.env.BUILD_DIRECTORY, 'default/' + target) |
4089 | if not os.path.lexists(link_target): |
4090 | - os.symlink('../' + source, link_target) |
4091 | + symlink('../' + source, link_target) |
4092 | |
4093 | # get perl to put the blib files in the build directory |
4094 | - blib_bld = os.path.join(conf.blddir, 'default/pidl/blib') |
4095 | - blib_src = os.path.join(conf.srcdir, 'pidl/blib') |
4096 | + blib_bld = os.path.join(conf.env.BUILD_DIRECTORY, 'default/pidl/blib') |
4097 | + blib_src = os.path.join(conf.srcnode.abspath(), 'pidl/blib') |
4098 | mkdir_p(blib_bld + '/man1') |
4099 | mkdir_p(blib_bld + '/man3') |
4100 | if os.path.islink(blib_src): |
4101 | @@ -148,7 +140,7 @@ def SAMBA_LIBRARY(bld, libname, source, |
4102 | public_headers = None |
4103 | |
4104 | if private_library and public_headers: |
4105 | - raise Utils.WafError("private library '%s' must not have public header files" % |
4106 | + raise Errors.WafError("private library '%s' must not have public header files" % |
4107 | libname) |
4108 | |
4109 | if LIB_MUST_BE_PRIVATE(bld, libname): |
4110 | @@ -225,13 +217,13 @@ def SAMBA_LIBRARY(bld, libname, source, |
4111 | # we don't want any public libraries without version numbers |
4112 | if (not private_library and target_type != 'PYTHON' and not realname): |
4113 | if vnum is None and soname is None: |
4114 | - raise Utils.WafError("public library '%s' must have a vnum" % |
4115 | + raise Errors.WafError("public library '%s' must have a vnum" % |
4116 | libname) |
4117 | if pc_files is None: |
4118 | - raise Utils.WafError("public library '%s' must have pkg-config file" % |
4119 | + raise Errors.WafError("public library '%s' must have pkg-config file" % |
4120 | libname) |
4121 | if public_headers is None and not bld.env['IS_EXTRA_PYTHON']: |
4122 | - raise Utils.WafError("public library '%s' must have header files" % |
4123 | + raise Errors.WafError("public library '%s' must have header files" % |
4124 | libname) |
4125 | |
4126 | if bundled_name is not None: |
4127 | @@ -273,7 +265,7 @@ def SAMBA_LIBRARY(bld, libname, source, |
4128 | vscript = None |
4129 | if bld.env.HAVE_LD_VERSION_SCRIPT: |
4130 | if private_library: |
4131 | - version = "%s_%s" % (Utils.g_module.APPNAME, Utils.g_module.VERSION) |
4132 | + version = "%s_%s" % (Context.g_module.APPNAME, Context.g_module.VERSION) |
4133 | elif vnum: |
4134 | version = "%s_%s" % (libname, vnum) |
4135 | else: |
4136 | @@ -282,17 +274,17 @@ def SAMBA_LIBRARY(bld, libname, source, |
4137 | vscript = "%s.vscript" % libname |
4138 | bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript, |
4139 | abi_match) |
4140 | - fullname = apply_pattern(bundled_name, bld.env.shlib_PATTERN) |
4141 | + fullname = apply_pattern(bundled_name, bld.env.cshlib_PATTERN) |
4142 | fullpath = bld.path.find_or_declare(fullname) |
4143 | vscriptpath = bld.path.find_or_declare(vscript) |
4144 | if not fullpath: |
4145 | - raise Utils.WafError("unable to find fullpath for %s" % fullname) |
4146 | + raise Errors.WafError("unable to find fullpath for %s" % fullname) |
4147 | if not vscriptpath: |
4148 | - raise Utils.WafError("unable to find vscript path for %s" % vscript) |
4149 | + raise Errors.WafError("unable to find vscript path for %s" % vscript) |
4150 | bld.add_manual_dependency(fullpath, vscriptpath) |
4151 | if bld.is_install: |
4152 | # also make the .inst file depend on the vscript |
4153 | - instname = apply_pattern(bundled_name + '.inst', bld.env.shlib_PATTERN) |
4154 | + instname = apply_pattern(bundled_name + '.inst', bld.env.cshlib_PATTERN) |
4155 | bld.add_manual_dependency(bld.path.find_or_declare(instname), bld.path.find_or_declare(vscript)) |
4156 | vscript = os.path.join(bld.path.abspath(bld.env), vscript) |
4157 | |
4158 | @@ -327,10 +319,12 @@ def SAMBA_LIBRARY(bld, libname, source, |
4159 | link_name = 'shared/%s' % realname |
4160 | |
4161 | if link_name: |
4162 | + if 'waflib.extras.compat15' in sys.modules: |
4163 | + link_name = 'default/' + link_name |
4164 | t.link_name = link_name |
4165 | |
4166 | if pc_files is not None and not private_library: |
4167 | - if pyembed and bld.env['IS_EXTRA_PYTHON']: |
4168 | + if pyembed: |
4169 | bld.PKG_CONFIG_FILES(pc_files, vnum=vnum, extra_name=bld.env['PYTHON_SO_ABI_FLAG']) |
4170 | else: |
4171 | bld.PKG_CONFIG_FILES(pc_files, vnum=vnum) |
4172 | @@ -674,7 +668,7 @@ def SAMBA_GENERATOR(bld, name, rule, source='', target='', |
4173 | target=target, |
4174 | shell=isinstance(rule, str), |
4175 | update_outputs=True, |
4176 | - before='cc', |
4177 | + before='c', |
4178 | ext_out='.c', |
4179 | samba_type='GENERATOR', |
4180 | dep_vars = dep_vars, |
4181 | @@ -728,22 +722,6 @@ Build.BuildContext.SET_BUILD_GROUP = SET_BUILD_GROUP |
4182 | |
4183 | |
4184 | |
4185 | -@conf |
4186 | -def ENABLE_TIMESTAMP_DEPENDENCIES(conf): |
4187 | - """use timestamps instead of file contents for deps |
4188 | - this currently doesn't work""" |
4189 | - def h_file(filename): |
4190 | - import stat |
4191 | - st = os.stat(filename) |
4192 | - if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file') |
4193 | - m = Utils.md5() |
4194 | - m.update(str(st.st_mtime)) |
4195 | - m.update(str(st.st_size)) |
4196 | - m.update(filename) |
4197 | - return m.digest() |
4198 | - Utils.h_file = h_file |
4199 | - |
4200 | - |
4201 | def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None): |
4202 | '''used to copy scripts from the source tree into the build directory |
4203 | for use by selftest''' |
4204 | @@ -758,14 +736,14 @@ def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None): |
4205 | target = os.path.join(installdir, iname) |
4206 | tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target)) |
4207 | mkdir_p(tgtdir) |
4208 | - link_src = os.path.normpath(os.path.join(bld.curdir, s)) |
4209 | + link_src = os.path.normpath(os.path.join(bld.path.abspath(), s)) |
4210 | link_dst = os.path.join(tgtdir, os.path.basename(iname)) |
4211 | if os.path.islink(link_dst) and os.readlink(link_dst) == link_src: |
4212 | continue |
4213 | - if os.path.exists(link_dst): |
4214 | + if os.path.islink(link_dst): |
4215 | os.unlink(link_dst) |
4216 | Logs.info("symlink: %s -> %s/%s" % (s, installdir, iname)) |
4217 | - os.symlink(link_src, link_dst) |
4218 | + symlink(link_src, link_dst) |
4219 | Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT |
4220 | |
4221 | |
4222 | @@ -779,10 +757,10 @@ def copy_and_fix_python_path(task): |
4223 | replacement="""sys.path.insert(0, "%s") |
4224 | sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"]) |
4225 | |
4226 | - if task.env["PYTHON"][0] == "/": |
4227 | - replacement_shebang = "#!%s\n" % task.env["PYTHON"] |
4228 | + if task.env["PYTHON"][0].startswith("/"): |
4229 | + replacement_shebang = "#!%s\n" % task.env["PYTHON"][0] |
4230 | else: |
4231 | - replacement_shebang = "#!/usr/bin/env %s\n" % task.env["PYTHON"] |
4232 | + replacement_shebang = "#!/usr/bin/env %s\n" % task.env["PYTHON"][0] |
4233 | |
4234 | installed_location=task.outputs[0].bldpath(task.env) |
4235 | source_file = open(task.inputs[0].srcpath(task.env)) |
4236 | @@ -790,7 +768,7 @@ sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"]) |
4237 | lineno = 0 |
4238 | for line in source_file: |
4239 | newline = line |
4240 | - if (lineno == 0 and task.env["PYTHON_SPECIFIED"] is True and |
4241 | + if (lineno == 0 and |
4242 | line[:2] == "#!"): |
4243 | newline = replacement_shebang |
4244 | elif pattern in line: |
4245 | @@ -798,7 +776,7 @@ sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"]) |
4246 | installed_file.write(newline) |
4247 | lineno = lineno + 1 |
4248 | installed_file.close() |
4249 | - os.chmod(installed_location, 0755) |
4250 | + os.chmod(installed_location, 0o755) |
4251 | return 0 |
4252 | |
4253 | def copy_and_fix_perl_path(task): |
4254 | @@ -826,7 +804,7 @@ def copy_and_fix_perl_path(task): |
4255 | installed_file.write(newline) |
4256 | lineno = lineno + 1 |
4257 | installed_file.close() |
4258 | - os.chmod(installed_location, 0755) |
4259 | + os.chmod(installed_location, 0o755) |
4260 | return 0 |
4261 | |
4262 | |
4263 | @@ -834,6 +812,8 @@ def install_file(bld, destdir, file, chmod=MODE_644, flat=False, |
4264 | python_fixup=False, perl_fixup=False, |
4265 | destname=None, base_name=None): |
4266 | '''install a file''' |
4267 | + if not isinstance(file, str): |
4268 | + file = file.abspath() |
4269 | destdir = bld.EXPAND_VARIABLES(destdir) |
4270 | if not destname: |
4271 | destname = file |
4272 | @@ -898,16 +878,19 @@ def INSTALL_DIR(bld, path, chmod=0o755, env=None): |
4273 | if not path: |
4274 | return [] |
4275 | |
4276 | - destpath = bld.get_install_path(path, env) |
4277 | + destpath = bld.EXPAND_VARIABLES(path) |
4278 | + if Options.options.destdir: |
4279 | + destpath = os.path.join(Options.options.destdir, destpath.lstrip(os.sep)) |
4280 | |
4281 | if bld.is_install > 0: |
4282 | if not os.path.isdir(destpath): |
4283 | try: |
4284 | + Logs.info('* create %s', destpath) |
4285 | os.makedirs(destpath) |
4286 | os.chmod(destpath, chmod) |
4287 | except OSError as e: |
4288 | if not os.path.isdir(destpath): |
4289 | - raise Utils.WafError("Cannot create the folder '%s' (error: %s)" % (path, e)) |
4290 | + raise Errors.WafError("Cannot create the folder '%s' (error: %s)" % (path, e)) |
4291 | Build.BuildContext.INSTALL_DIR = INSTALL_DIR |
4292 | |
4293 | def INSTALL_DIRS(bld, destdir, dirs, chmod=0o755, env=None): |
4294 | @@ -938,7 +921,7 @@ def SAMBAMANPAGES(bld, manpages, extra_source=None): |
4295 | '''build and install manual pages''' |
4296 | bld.env.SAMBA_EXPAND_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/expand-sambadoc.xsl' |
4297 | bld.env.SAMBA_MAN_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/man.xsl' |
4298 | - bld.env.SAMBA_CATALOG = bld.srcnode.abspath() + '/bin/default/docs-xml/build/catalog.xml' |
4299 | + bld.env.SAMBA_CATALOG = bld.bldnode.abspath() + '/docs-xml/build/catalog.xml' |
4300 | bld.env.SAMBA_CATALOGS = 'file:///etc/xml/catalog file:///usr/local/share/xml/catalog file://' + bld.env.SAMBA_CATALOG |
4301 | |
4302 | for m in manpages.split(): |
4303 | @@ -958,59 +941,6 @@ def SAMBAMANPAGES(bld, manpages, extra_source=None): |
4304 | bld.INSTALL_FILES('${MANDIR}/man%s' % m[-1], m, flat=True) |
4305 | Build.BuildContext.SAMBAMANPAGES = SAMBAMANPAGES |
4306 | |
4307 | -############################################################# |
4308 | -# give a nicer display when building different types of files |
4309 | -def progress_display(self, msg, fname): |
4310 | - col1 = Logs.colors(self.color) |
4311 | - col2 = Logs.colors.NORMAL |
4312 | - total = self.position[1] |
4313 | - n = len(str(total)) |
4314 | - fs = '[%%%dd/%%%dd] %s %%s%%s%%s\n' % (n, n, msg) |
4315 | - return fs % (self.position[0], self.position[1], col1, fname, col2) |
4316 | - |
4317 | -def link_display(self): |
4318 | - if Options.options.progress_bar != 0: |
4319 | - return Task.Task.old_display(self) |
4320 | - fname = self.outputs[0].bldpath(self.env) |
4321 | - return progress_display(self, 'Linking', fname) |
4322 | -Task.TaskBase.classes['cc_link'].display = link_display |
4323 | - |
4324 | -def samba_display(self): |
4325 | - if Options.options.progress_bar != 0: |
4326 | - return Task.Task.old_display(self) |
4327 | - |
4328 | - targets = LOCAL_CACHE(self, 'TARGET_TYPE') |
4329 | - if self.name in targets: |
4330 | - target_type = targets[self.name] |
4331 | - type_map = { 'GENERATOR' : 'Generating', |
4332 | - 'PROTOTYPE' : 'Generating' |
4333 | - } |
4334 | - if target_type in type_map: |
4335 | - return progress_display(self, type_map[target_type], self.name) |
4336 | - |
4337 | - if len(self.inputs) == 0: |
4338 | - return Task.Task.old_display(self) |
4339 | - |
4340 | - fname = self.inputs[0].bldpath(self.env) |
4341 | - if fname[0:3] == '../': |
4342 | - fname = fname[3:] |
4343 | - ext_loc = fname.rfind('.') |
4344 | - if ext_loc == -1: |
4345 | - return Task.Task.old_display(self) |
4346 | - ext = fname[ext_loc:] |
4347 | - |
4348 | - ext_map = { '.idl' : 'Compiling IDL', |
4349 | - '.et' : 'Compiling ERRTABLE', |
4350 | - '.asn1': 'Compiling ASN1', |
4351 | - '.c' : 'Compiling' } |
4352 | - if ext in ext_map: |
4353 | - return progress_display(self, ext_map[ext], fname) |
4354 | - return Task.Task.old_display(self) |
4355 | - |
4356 | -Task.TaskBase.classes['Task'].old_display = Task.TaskBase.classes['Task'].display |
4357 | -Task.TaskBase.classes['Task'].display = samba_display |
4358 | - |
4359 | - |
4360 | @after('apply_link') |
4361 | @feature('cshlib') |
4362 | def apply_bundle_remove_dynamiclib_patch(self): |
4363 | diff --git a/buildtools/wafsamba/wscript b/buildtools/wafsamba/wscript |
4364 | index 0eef330..ab19859 100644 |
4365 | --- a/buildtools/wafsamba/wscript |
4366 | +++ b/buildtools/wafsamba/wscript |
4367 | @@ -3,7 +3,8 @@ |
4368 | # this is a base set of waf rules that everything else pulls in first |
4369 | |
4370 | import os, sys |
4371 | -import wafsamba, Configure, Logs, Options, Utils |
4372 | +from waflib import Configure, Logs, Options, Utils, Context, Errors |
4373 | +import wafsamba |
4374 | from samba_utils import os_path_relpath |
4375 | from optparse import SUPPRESS_HELP |
4376 | |
4377 | @@ -14,12 +15,17 @@ from optparse import SUPPRESS_HELP |
4378 | # are resolved related to WAFCACHE. It will need a lot of testing |
4379 | # before it is enabled by default. |
4380 | if '--enable-auto-reconfigure' in sys.argv: |
4381 | - Configure.autoconfig = True |
4382 | + Configure.autoconfig = 'clobber' |
4383 | |
4384 | -def set_options(opt): |
4385 | - opt.tool_options('compiler_cc') |
4386 | +def default_value(option, default=''): |
4387 | + if option in Options.options.__dict__: |
4388 | + return Options.options.__dict__[option] |
4389 | + return default |
4390 | |
4391 | - opt.tool_options('gnu_dirs') |
4392 | +def options(opt): |
4393 | + opt.load('compiler_cc') |
4394 | + |
4395 | + opt.load('gnu_dirs') |
4396 | |
4397 | gr = opt.option_group('library handling options') |
4398 | |
4399 | @@ -31,17 +37,17 @@ def set_options(opt): |
4400 | help=("comma separated list of normally public libraries to build instead as private libraries. May include !LIBNAME to disable making a library private. Can be 'NONE' or 'ALL' [auto]"), |
4401 | action="store", dest='PRIVATE_LIBS', default='') |
4402 | |
4403 | - extension_default = Options.options['PRIVATE_EXTENSION_DEFAULT'] |
4404 | + extension_default = default_value('PRIVATE_EXTENSION_DEFAULT') |
4405 | gr.add_option('--private-library-extension', |
4406 | help=("name extension for private libraries [%s]" % extension_default), |
4407 | action="store", dest='PRIVATE_EXTENSION', default=extension_default) |
4408 | |
4409 | - extension_exception = Options.options['PRIVATE_EXTENSION_EXCEPTION'] |
4410 | + extension_exception = default_value('PRIVATE_EXTENSION_EXCEPTION') |
4411 | gr.add_option('--private-extension-exception', |
4412 | help=("comma separated list of libraries to not apply extension to [%s]" % extension_exception), |
4413 | action="store", dest='PRIVATE_EXTENSION_EXCEPTION', default=extension_exception) |
4414 | |
4415 | - builtin_default = Options.options['BUILTIN_LIBRARIES_DEFAULT'] |
4416 | + builtin_default = default_value('BUILTIN_LIBRARIES_DEFAULT') |
4417 | gr.add_option('--builtin-libraries', |
4418 | help=("command separated list of libraries to build directly into binaries [%s]" % builtin_default), |
4419 | action="store", dest='BUILTIN_LIBRARIES', default=builtin_default) |
4420 | @@ -71,7 +77,7 @@ def set_options(opt): |
4421 | action="store", dest='MODULESDIR', default='${PREFIX}/modules') |
4422 | |
4423 | opt.add_option('--with-privatelibdir', |
4424 | - help=("private library directory [PREFIX/lib/%s]" % Utils.g_module.APPNAME), |
4425 | + help=("private library directory [PREFIX/lib/%s]" % Context.g_module.APPNAME), |
4426 | action="store", dest='PRIVATELIBDIR', default=None) |
4427 | |
4428 | opt.add_option('--with-libiconv', |
4429 | @@ -109,9 +115,6 @@ def set_options(opt): |
4430 | gr.add_option('--enable-gccdeps', |
4431 | help=("Enable use of gcc -MD dependency module"), |
4432 | action="store_true", dest='enable_gccdeps', default=True) |
4433 | - gr.add_option('--timestamp-dependencies', |
4434 | - help=("use file timestamps instead of content for build dependencies (BROKEN)"), |
4435 | - action="store_true", dest='timestamp_dependencies', default=False) |
4436 | gr.add_option('--pedantic', |
4437 | help=("Enable even more compiler warnings"), |
4438 | action='store_true', dest='pedantic', default=False) |
4439 | @@ -210,23 +213,19 @@ def set_options(opt): |
4440 | @Utils.run_once |
4441 | def configure(conf): |
4442 | conf.env.hlist = [] |
4443 | - conf.env.srcdir = conf.srcdir |
4444 | + conf.env.srcdir = conf.srcnode.abspath() |
4445 | |
4446 | conf.define('SRCDIR', conf.env['srcdir']) |
4447 | |
4448 | - if Options.options.timestamp_dependencies: |
4449 | - conf.ENABLE_TIMESTAMP_DEPENDENCIES() |
4450 | - |
4451 | conf.SETUP_CONFIGURE_CACHE(Options.options.enable_configure_cache) |
4452 | |
4453 | # load our local waf extensions |
4454 | - conf.check_tool('gnu_dirs') |
4455 | - conf.check_tool('wafsamba') |
4456 | - conf.check_tool('print_commands') |
4457 | + conf.load('gnu_dirs') |
4458 | + conf.load('wafsamba') |
4459 | |
4460 | conf.CHECK_CC_ENV() |
4461 | |
4462 | - conf.check_tool('compiler_cc') |
4463 | + conf.load('compiler_c') |
4464 | |
4465 | conf.CHECK_STANDARD_LIBPATH() |
4466 | |
4467 | @@ -236,31 +235,10 @@ def configure(conf): |
4468 | # older gcc versions (< 4.4) does not work with gccdeps, so we have to see if the .d file is generated |
4469 | if Options.options.enable_gccdeps: |
4470 | # stale file removal - the configuration may pick up the old .pyc file |
4471 | - p = os.path.join(conf.srcdir, 'buildtools/wafsamba/gccdeps.pyc') |
4472 | + p = os.path.join(conf.env.srcdir, 'buildtools/wafsamba/gccdeps.pyc') |
4473 | if os.path.exists(p): |
4474 | os.remove(p) |
4475 | - |
4476 | - from TaskGen import feature, after |
4477 | - @feature('testd') |
4478 | - @after('apply_core') |
4479 | - def check_d(self): |
4480 | - tsk = self.compiled_tasks[0] |
4481 | - tsk.outputs.append(tsk.outputs[0].change_ext('.d')) |
4482 | - |
4483 | - import Task |
4484 | - cc = Task.TaskBase.classes['cc'] |
4485 | - oldmeth = cc.run |
4486 | - |
4487 | - cc.run = Task.compile_fun_noshell('cc', '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath(env)}')[0] |
4488 | - try: |
4489 | - try: |
4490 | - conf.check(features='c testd', fragment='int main() {return 0;}\n', ccflags=['-MD'], mandatory=True, msg='Check for -MD') |
4491 | - except: |
4492 | - pass |
4493 | - else: |
4494 | - conf.check_tool('gccdeps', tooldir=conf.srcdir + "/buildtools/wafsamba") |
4495 | - finally: |
4496 | - cc.run = oldmeth |
4497 | + conf.load('gccdeps') |
4498 | |
4499 | # make the install paths available in environment |
4500 | conf.env.LIBDIR = Options.options.LIBDIR or '${PREFIX}/lib' |
4501 | @@ -330,15 +308,16 @@ def configure(conf): |
4502 | "-qhalt=w", # IBM xlc |
4503 | "-w2", # Tru64 |
4504 | ]: |
4505 | - if conf.CHECK_CFLAGS([f], ''' |
4506 | -'''): |
4507 | + if conf.CHECK_CFLAGS([f]): |
4508 | if not 'WERROR_CFLAGS' in conf.env: |
4509 | conf.env['WERROR_CFLAGS'] = [] |
4510 | conf.env['WERROR_CFLAGS'].extend([f]) |
4511 | break |
4512 | |
4513 | # check which compiler/linker flags are needed for rpath support |
4514 | - if not conf.CHECK_LDFLAGS(['-Wl,-rpath,.']) and conf.CHECK_LDFLAGS(['-Wl,-R,.']): |
4515 | + if conf.CHECK_LDFLAGS(['-Wl,-rpath,.']): |
4516 | + conf.env['RPATH_ST'] = '-Wl,-rpath,%s' |
4517 | + elif conf.CHECK_LDFLAGS(['-Wl,-R,.']): |
4518 | conf.env['RPATH_ST'] = '-Wl,-R,%s' |
4519 | |
4520 | # check for rpath |
4521 | @@ -348,7 +327,7 @@ def configure(conf): |
4522 | conf.env.RPATH_ON_INSTALL = (conf.env.RPATH_ON_BUILD and |
4523 | not Options.options.disable_rpath_install) |
4524 | if not conf.env.PRIVATELIBDIR: |
4525 | - conf.env.PRIVATELIBDIR = '%s/%s' % (conf.env.LIBDIR, Utils.g_module.APPNAME) |
4526 | + conf.env.PRIVATELIBDIR = '%s/%s' % (conf.env.LIBDIR, Context.g_module.APPNAME) |
4527 | conf.env.RPATH_ON_INSTALL_PRIVATE = ( |
4528 | not Options.options.disable_rpath_private_install) |
4529 | else: |
4530 | @@ -370,10 +349,10 @@ def configure(conf): |
4531 | else: |
4532 | conf.env.HAVE_LD_VERSION_SCRIPT = False |
4533 | |
4534 | - if conf.CHECK_CFLAGS(['-fvisibility=hidden'] + conf.env.WERROR_CFLAGS): |
4535 | + if conf.CHECK_CFLAGS(['-fvisibility=hidden']): |
4536 | conf.env.VISIBILITY_CFLAGS = '-fvisibility=hidden' |
4537 | conf.CHECK_CODE('''int main(void) { return 0; } |
4538 | - __attribute__((visibility("default"))) void vis_foo2(void) {}''', |
4539 | + __attribute__((visibility("default"))) void vis_foo2(void) {}\n''', |
4540 | cflags=conf.env.VISIBILITY_CFLAGS, |
4541 | strict=True, |
4542 | define='HAVE_VISIBILITY_ATTR', addmain=False) |
4543 | @@ -466,6 +445,15 @@ def configure(conf): |
4544 | conf.DEFINE('_GNU_SOURCE', 1, add_to_cflags=True) |
4545 | conf.DEFINE('_XOPEN_SOURCE_EXTENDED', 1, add_to_cflags=True) |
4546 | |
4547 | + # |
4548 | + # Needs to be defined before std*.h and string*.h are included |
4549 | + # As Python.h already brings string.h we need it in CFLAGS. |
4550 | + # See memset_s() details here: |
4551 | + # https://en.cppreference.com/w/c/string/byte/memset |
4552 | + # |
4553 | + if conf.CHECK_CFLAGS(['-D__STDC_WANT_LIB_EXT1__=1']): |
4554 | + conf.ADD_CFLAGS('-D__STDC_WANT_LIB_EXT1__=1') |
4555 | + |
4556 | # on Tru64 certain features are only available with _OSF_SOURCE set to 1 |
4557 | # and _XOPEN_SOURCE set to 600 |
4558 | if conf.env['SYSTEM_UNAME_SYSNAME'] == 'OSF1': |
4559 | @@ -501,7 +489,7 @@ struct foo bar = { .y = 'X', .x = 1 }; |
4560 | |
4561 | # see if we need special largefile flags |
4562 | if not conf.CHECK_LARGEFILE(): |
4563 | - raise Utils.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8') |
4564 | + raise Errors.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8') |
4565 | |
4566 | if conf.env.HAVE_STDDEF_H and conf.env.HAVE_STDLIB_H: |
4567 | conf.DEFINE('STDC_HEADERS', 1) |
4568 | @@ -512,7 +500,7 @@ struct foo bar = { .y = 'X', .x = 1 }; |
4569 | conf.DEFINE('TIME_WITH_SYS_TIME', 1) |
4570 | |
4571 | # cope with different extensions for libraries |
4572 | - (root, ext) = os.path.splitext(conf.env.shlib_PATTERN) |
4573 | + (root, ext) = os.path.splitext(conf.env.cshlib_PATTERN) |
4574 | if ext[0] == '.': |
4575 | conf.define('SHLIBEXT', ext[1:], quote=True) |
4576 | else: |
4577 | @@ -534,7 +522,7 @@ struct foo bar = { .y = 'X', .x = 1 }; |
4578 | #if !defined(LITTLE) || !defined(B) || LITTLE != B |
4579 | #error Not little endian. |
4580 | #endif |
4581 | - int main(void) { return 0; }""", |
4582 | + int main(void) { return 0; }\n""", |
4583 | addmain=False, |
4584 | headers="endian.h sys/endian.h", |
4585 | define="HAVE_LITTLE_ENDIAN") |
4586 | @@ -553,7 +541,7 @@ struct foo bar = { .y = 'X', .x = 1 }; |
4587 | #if !defined(BIG) || !defined(B) || BIG != B |
4588 | #error Not big endian. |
4589 | #endif |
4590 | - int main(void) { return 0; }""", |
4591 | + int main(void) { return 0; }\n""", |
4592 | addmain=False, |
4593 | headers="endian.h sys/endian.h", |
4594 | define="HAVE_BIG_ENDIAN") |
4595 | @@ -576,7 +564,7 @@ struct foo bar = { .y = 'X', .x = 1 }; |
4596 | # Extra sanity check. |
4597 | if conf.CONFIG_SET("HAVE_BIG_ENDIAN") == conf.CONFIG_SET("HAVE_LITTLE_ENDIAN"): |
4598 | Logs.error("Failed endian determination. The PDP-11 is back?") |
4599 | - sys.exit(1) |
4600 | + sys.exit(1) |
4601 | else: |
4602 | if conf.CONFIG_SET("HAVE_BIG_ENDIAN"): |
4603 | conf.DEFINE('WORDS_BIGENDIAN', 1) |
4604 | @@ -607,12 +595,13 @@ struct foo bar = { .y = 'X', .x = 1 }; |
4605 | |
4606 | def build(bld): |
4607 | # give a more useful message if the source directory has moved |
4608 | - relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath()) |
4609 | + curdir = bld.path.abspath() |
4610 | + srcdir = bld.srcnode.abspath() |
4611 | + relpath = os_path_relpath(curdir, srcdir) |
4612 | if relpath.find('../') != -1: |
4613 | - Logs.error('bld.curdir %s is not a child of %s' % (bld.curdir, bld.srcnode.abspath())) |
4614 | - raise Utils.WafError('''The top source directory has moved. Please run distclean and reconfigure''') |
4615 | + Logs.error('bld.path %s is not a child of %s' % (curdir, srcdir)) |
4616 | + raise Errors.WafError('''The top source directory has moved. Please run distclean and reconfigure''') |
4617 | |
4618 | - bld.CHECK_MAKEFLAGS() |
4619 | bld.SETUP_BUILD_GROUPS() |
4620 | bld.ENFORCE_GROUP_ORDERING() |
4621 | bld.CHECK_PROJECT_RULES() |
4622 | diff --git a/common/dump.c b/common/dump.c |
4623 | index 73286b8..adcf591 100644 |
4624 | --- a/common/dump.c |
4625 | +++ b/common/dump.c |
4626 | @@ -60,6 +60,7 @@ static tdb_off_t tdb_dump_record(struct tdb_context *tdb, int hash, |
4627 | |
4628 | static int tdb_dump_chain(struct tdb_context *tdb, int i) |
4629 | { |
4630 | + struct tdb_chainwalk_ctx chainwalk; |
4631 | tdb_off_t rec_ptr, top; |
4632 | |
4633 | if (i == -1) { |
4634 | @@ -74,11 +75,19 @@ static int tdb_dump_chain(struct tdb_context *tdb, int i) |
4635 | if (tdb_ofs_read(tdb, top, &rec_ptr) == -1) |
4636 | return tdb_unlock(tdb, i, F_WRLCK); |
4637 | |
4638 | + tdb_chainwalk_init(&chainwalk, rec_ptr); |
4639 | + |
4640 | if (rec_ptr) |
4641 | printf("hash=%d\n", i); |
4642 | |
4643 | while (rec_ptr) { |
4644 | + bool ok; |
4645 | rec_ptr = tdb_dump_record(tdb, i, rec_ptr); |
4646 | + ok = tdb_chainwalk_check(tdb, &chainwalk, rec_ptr); |
4647 | + if (!ok) { |
4648 | + printf("circular hash chain %d\n", i); |
4649 | + break; |
4650 | + } |
4651 | } |
4652 | |
4653 | return tdb_unlock(tdb, i, F_WRLCK); |
4654 | @@ -86,7 +95,7 @@ static int tdb_dump_chain(struct tdb_context *tdb, int i) |
4655 | |
4656 | _PUBLIC_ void tdb_dump_all(struct tdb_context *tdb) |
4657 | { |
4658 | - int i; |
4659 | + uint32_t i; |
4660 | for (i=0;i<tdb->hash_size;i++) { |
4661 | tdb_dump_chain(tdb, i); |
4662 | } |
4663 | diff --git a/common/freelist.c b/common/freelist.c |
4664 | index 86fac2f..37a4c16 100644 |
4665 | --- a/common/freelist.c |
4666 | +++ b/common/freelist.c |
4667 | @@ -27,12 +27,6 @@ |
4668 | |
4669 | #include "tdb_private.h" |
4670 | |
4671 | -/* 'right' merges can involve O(n^2) cost when combined with a |
4672 | - traverse, so they are disabled until we find a way to do them in |
4673 | - O(1) time |
4674 | -*/ |
4675 | -#define USE_RIGHT_MERGES 0 |
4676 | - |
4677 | /* read a freelist record and check for simple errors */ |
4678 | int tdb_rec_free_read(struct tdb_context *tdb, tdb_off_t off, struct tdb_record *rec) |
4679 | { |
4680 | @@ -61,30 +55,6 @@ int tdb_rec_free_read(struct tdb_context *tdb, tdb_off_t off, struct tdb_record |
4681 | return 0; |
4682 | } |
4683 | |
4684 | - |
4685 | -#if USE_RIGHT_MERGES |
4686 | -/* Remove an element from the freelist. Must have alloc lock. */ |
4687 | -static int remove_from_freelist(struct tdb_context *tdb, tdb_off_t off, tdb_off_t next) |
4688 | -{ |
4689 | - tdb_off_t last_ptr, i; |
4690 | - |
4691 | - /* read in the freelist top */ |
4692 | - last_ptr = FREELIST_TOP; |
4693 | - while (tdb_ofs_read(tdb, last_ptr, &i) != -1 && i != 0) { |
4694 | - if (i == off) { |
4695 | - /* We've found it! */ |
4696 | - return tdb_ofs_write(tdb, last_ptr, &next); |
4697 | - } |
4698 | - /* Follow chain (next offset is at start of record) */ |
4699 | - last_ptr = i; |
4700 | - } |
4701 | - tdb->ecode = TDB_ERR_CORRUPT; |
4702 | - TDB_LOG((tdb, TDB_DEBUG_FATAL,"remove_from_freelist: not on list at off=%u\n", off)); |
4703 | - return -1; |
4704 | -} |
4705 | -#endif |
4706 | - |
4707 | - |
4708 | /* update a record tailer (must hold allocation lock) */ |
4709 | static int update_tailer(struct tdb_context *tdb, tdb_off_t offset, |
4710 | const struct tdb_record *rec) |
4711 | @@ -199,7 +169,7 @@ static int merge_with_left_record(struct tdb_context *tdb, |
4712 | * 0 if left was not a free record |
4713 | * 1 if left was free and successfully merged. |
4714 | * |
4715 | - * The currend record is handed in with pointer and fully read record. |
4716 | + * The current record is handed in with pointer and fully read record. |
4717 | * |
4718 | * The left record pointer and struct can be retrieved as result |
4719 | * in lp and lr; |
4720 | @@ -318,33 +288,6 @@ int tdb_free(struct tdb_context *tdb, tdb_off_t offset, struct tdb_record *rec) |
4721 | goto fail; |
4722 | } |
4723 | |
4724 | -#if USE_RIGHT_MERGES |
4725 | - /* Look right first (I'm an Australian, dammit) */ |
4726 | - if (offset + sizeof(*rec) + rec->rec_len + sizeof(*rec) <= tdb->map_size) { |
4727 | - tdb_off_t right = offset + sizeof(*rec) + rec->rec_len; |
4728 | - struct tdb_record r; |
4729 | - |
4730 | - if (tdb->methods->tdb_read(tdb, right, &r, sizeof(r), DOCONV()) == -1) { |
4731 | - TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: right read failed at %u\n", right)); |
4732 | - goto left; |
4733 | - } |
4734 | - |
4735 | - /* If it's free, expand to include it. */ |
4736 | - if (r.magic == TDB_FREE_MAGIC) { |
4737 | - if (remove_from_freelist(tdb, right, r.next) == -1) { |
4738 | - TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: right free failed at %u\n", right)); |
4739 | - goto left; |
4740 | - } |
4741 | - rec->rec_len += sizeof(r) + r.rec_len; |
4742 | - if (update_tailer(tdb, offset, rec) == -1) { |
4743 | - TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: update_tailer failed at %u\n", offset)); |
4744 | - goto fail; |
4745 | - } |
4746 | - } |
4747 | - } |
4748 | -left: |
4749 | -#endif |
4750 | - |
4751 | ret = check_merge_with_left_record(tdb, offset, rec, NULL, NULL); |
4752 | if (ret == -1) { |
4753 | goto fail; |
4754 | @@ -444,6 +387,8 @@ static tdb_off_t tdb_allocate_from_freelist( |
4755 | struct tdb_context *tdb, tdb_len_t length, struct tdb_record *rec) |
4756 | { |
4757 | tdb_off_t rec_ptr, last_ptr, newrec_ptr; |
4758 | + struct tdb_chainwalk_ctx chainwalk; |
4759 | + bool modified; |
4760 | struct { |
4761 | tdb_off_t rec_ptr, last_ptr; |
4762 | tdb_len_t rec_len; |
4763 | @@ -466,6 +411,9 @@ static tdb_off_t tdb_allocate_from_freelist( |
4764 | if (tdb_ofs_read(tdb, FREELIST_TOP, &rec_ptr) == -1) |
4765 | return 0; |
4766 | |
4767 | + modified = false; |
4768 | + tdb_chainwalk_init(&chainwalk, rec_ptr); |
4769 | + |
4770 | bestfit.rec_ptr = 0; |
4771 | bestfit.last_ptr = 0; |
4772 | bestfit.rec_len = 0; |
4773 | @@ -526,6 +474,8 @@ static tdb_off_t tdb_allocate_from_freelist( |
4774 | merge_created_candidate = true; |
4775 | } |
4776 | |
4777 | + modified = true; |
4778 | + |
4779 | continue; |
4780 | } |
4781 | |
4782 | @@ -542,6 +492,14 @@ static tdb_off_t tdb_allocate_from_freelist( |
4783 | last_ptr = rec_ptr; |
4784 | rec_ptr = rec->next; |
4785 | |
4786 | + if (!modified) { |
4787 | + bool ok; |
4788 | + ok = tdb_chainwalk_check(tdb, &chainwalk, rec_ptr); |
4789 | + if (!ok) { |
4790 | + return 0; |
4791 | + } |
4792 | + } |
4793 | + |
4794 | /* if we've found a record that is big enough, then |
4795 | stop searching if its also not too big. The |
4796 | definition of 'too big' changes as we scan |
4797 | @@ -597,6 +555,17 @@ static bool tdb_alloc_dead( |
4798 | return (tdb_ofs_write(tdb, last_ptr, &rec->next) == 0); |
4799 | } |
4800 | |
4801 | +static void tdb_purge_dead(struct tdb_context *tdb, uint32_t hash) |
4802 | +{ |
4803 | + int max_dead_records = tdb->max_dead_records; |
4804 | + |
4805 | + tdb->max_dead_records = 0; |
4806 | + |
4807 | + tdb_trim_dead(tdb, hash); |
4808 | + |
4809 | + tdb->max_dead_records = max_dead_records; |
4810 | +} |
4811 | + |
4812 | /* |
4813 | * Chain "hash" is assumed to be locked |
4814 | */ |
4815 | @@ -605,7 +574,7 @@ tdb_off_t tdb_allocate(struct tdb_context *tdb, int hash, tdb_len_t length, |
4816 | struct tdb_record *rec) |
4817 | { |
4818 | tdb_off_t ret; |
4819 | - int i; |
4820 | + uint32_t i; |
4821 | |
4822 | if (tdb->max_dead_records == 0) { |
4823 | /* |
4824 | @@ -661,6 +630,12 @@ blocking_freelist_allocate: |
4825 | if (tdb_lock(tdb, -1, F_WRLCK) == -1) { |
4826 | return 0; |
4827 | } |
4828 | + /* |
4829 | + * Dead records can happen even if max_dead_records==0, they |
4830 | + * are older than the max_dead_records concept: They happen if |
4831 | + * tdb_delete happens concurrently with a traverse. |
4832 | + */ |
4833 | + tdb_purge_dead(tdb, hash); |
4834 | ret = tdb_allocate_from_freelist(tdb, length, rec); |
4835 | tdb_unlock(tdb, -1, F_WRLCK); |
4836 | return ret; |
4837 | diff --git a/common/io.c b/common/io.c |
4838 | index 94b3163..df46017 100644 |
4839 | --- a/common/io.c |
4840 | +++ b/common/io.c |
4841 | @@ -96,7 +96,7 @@ static int tdb_ftruncate(struct tdb_context *tdb, off_t length) |
4842 | return ret; |
4843 | } |
4844 | |
4845 | -#if HAVE_POSIX_FALLOCATE |
4846 | +#ifdef HAVE_POSIX_FALLOCATE |
4847 | static int tdb_posix_fallocate(struct tdb_context *tdb, off_t offset, |
4848 | off_t len) |
4849 | { |
4850 | @@ -413,7 +413,7 @@ static int tdb_expand_file(struct tdb_context *tdb, tdb_off_t size, tdb_off_t ad |
4851 | return -1; |
4852 | } |
4853 | |
4854 | -#if HAVE_POSIX_FALLOCATE |
4855 | +#ifdef HAVE_POSIX_FALLOCATE |
4856 | ret = tdb_posix_fallocate(tdb, size, addition); |
4857 | if (ret == 0) { |
4858 | return 0; |
4859 | diff --git a/common/lock.c b/common/lock.c |
4860 | index 9f30c7a..f55184d 100644 |
4861 | --- a/common/lock.c |
4862 | +++ b/common/lock.c |
4863 | @@ -149,8 +149,8 @@ static int fcntl_unlock(struct tdb_context *tdb, int rw, off_t off, off_t len) |
4864 | * This is the memory layout of the hashchain array: |
4865 | * |
4866 | * FREELIST_TOP + 0 = freelist |
4867 | - * FREELIST_TOP + 4 = hashtbale list 0 |
4868 | - * FREELIST_TOP + 8 = hashtbale list 1 |
4869 | + * FREELIST_TOP + 4 = hashtable list 0 |
4870 | + * FREELIST_TOP + 8 = hashtable list 1 |
4871 | * ... |
4872 | * |
4873 | * Otoh lock_offset computes: |
4874 | diff --git a/common/open.c b/common/open.c |
4875 | index 8baa7e4..dd5783e 100644 |
4876 | --- a/common/open.c |
4877 | +++ b/common/open.c |
4878 | @@ -230,8 +230,6 @@ static bool check_header_hash(struct tdb_context *tdb, |
4879 | static bool tdb_mutex_open_ok(struct tdb_context *tdb, |
4880 | const struct tdb_header *header) |
4881 | { |
4882 | - int locked; |
4883 | - |
4884 | if (tdb->flags & TDB_NOLOCK) { |
4885 | /* |
4886 | * We don't look at locks, so it does not matter to have a |
4887 | @@ -240,37 +238,6 @@ static bool tdb_mutex_open_ok(struct tdb_context *tdb, |
4888 | return true; |
4889 | } |
4890 | |
4891 | - locked = tdb_nest_lock(tdb, ACTIVE_LOCK, F_WRLCK, |
4892 | - TDB_LOCK_NOWAIT|TDB_LOCK_PROBE); |
4893 | - |
4894 | - if ((locked == -1) && (tdb->ecode == TDB_ERR_LOCK)) { |
4895 | - /* |
4896 | - * CLEAR_IF_FIRST still active. The tdb was created on this |
4897 | - * host, so we can assume the mutex implementation is |
4898 | - * compatible. Important for tools like tdbdump on a still |
4899 | - * open locking.tdb. |
4900 | - */ |
4901 | - goto check_local_settings; |
4902 | - } |
4903 | - |
4904 | - /* |
4905 | - * We got the CLEAR_IF_FIRST lock. That means the database was |
4906 | - * potentially copied from somewhere else. The mutex implementation |
4907 | - * might be incompatible. |
4908 | - */ |
4909 | - |
4910 | - if (tdb_nest_unlock(tdb, ACTIVE_LOCK, F_WRLCK, false) == -1) { |
4911 | - /* |
4912 | - * Should not happen |
4913 | - */ |
4914 | - TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok: " |
4915 | - "failed to release ACTIVE_LOCK on %s: %s\n", |
4916 | - tdb->name, strerror(errno))); |
4917 | - return false; |
4918 | - } |
4919 | - |
4920 | -check_local_settings: |
4921 | - |
4922 | if (!(tdb->flags & TDB_MUTEX_LOCKING)) { |
4923 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok[%s]: " |
4924 | "Can use mutexes only with " |
4925 | @@ -281,10 +248,10 @@ check_local_settings: |
4926 | |
4927 | if (tdb_mutex_size(tdb) != header->mutex_size) { |
4928 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok[%s]: " |
4929 | - "Mutex size changed from %u to %u\n.", |
4930 | + "Mutex size changed from %"PRIu32" to %zu\n.", |
4931 | tdb->name, |
4932 | - (unsigned int)header->mutex_size, |
4933 | - (unsigned int)tdb_mutex_size(tdb))); |
4934 | + header->mutex_size, |
4935 | + tdb_mutex_size(tdb))); |
4936 | return false; |
4937 | } |
4938 | |
4939 | @@ -297,7 +264,9 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td |
4940 | tdb_hash_func hash_fn) |
4941 | { |
4942 | int orig_errno = errno; |
4943 | - struct tdb_header header; |
4944 | + struct tdb_header header = { |
4945 | + .version = 0, |
4946 | + }; |
4947 | struct tdb_context *tdb; |
4948 | struct stat st; |
4949 | int rev = 0; |
4950 | @@ -309,8 +278,6 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td |
4951 | uint32_t magic1, magic2; |
4952 | int ret; |
4953 | |
4954 | - ZERO_STRUCT(header); |
4955 | - |
4956 | if (!(tdb = (struct tdb_context *)calloc(1, sizeof *tdb))) { |
4957 | /* Can't log this */ |
4958 | errno = ENOMEM; |
4959 | @@ -418,14 +385,6 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td |
4960 | * the runtime check for existing tdb's comes later. |
4961 | */ |
4962 | |
4963 | - if (!(tdb->flags & TDB_CLEAR_IF_FIRST)) { |
4964 | - TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_open_ex: " |
4965 | - "invalid flags for %s - TDB_MUTEX_LOCKING " |
4966 | - "requires TDB_CLEAR_IF_FIRST\n", name)); |
4967 | - errno = EINVAL; |
4968 | - goto fail; |
4969 | - } |
4970 | - |
4971 | if (tdb->flags & TDB_INTERNAL) { |
4972 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_open_ex: " |
4973 | "invalid flags for %s - TDB_MUTEX_LOCKING and " |
4974 | @@ -634,6 +593,30 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td |
4975 | * mutex locking. |
4976 | */ |
4977 | tdb->hdr_ofs = header.mutex_size; |
4978 | + |
4979 | + if ((!(tdb_flags & TDB_CLEAR_IF_FIRST)) && (!tdb->read_only)) { |
4980 | + /* |
4981 | + * Open an existing mutexed tdb, but without |
4982 | + * CLEAR_IF_FIRST. We need to initialize the |
4983 | + * mutex array and keep the CLEAR_IF_FIRST |
4984 | + * lock locked. |
4985 | + */ |
4986 | + ret = tdb_nest_lock(tdb, ACTIVE_LOCK, F_WRLCK, |
4987 | + TDB_LOCK_NOWAIT|TDB_LOCK_PROBE); |
4988 | + locked = (ret == 0); |
4989 | + |
4990 | + if (locked) { |
4991 | + ret = tdb_mutex_init(tdb); |
4992 | + if (ret == -1) { |
4993 | + TDB_LOG((tdb, |
4994 | + TDB_DEBUG_FATAL, |
4995 | + "tdb_open_ex: tdb_mutex_init " |
4996 | + "failed for ""%s: %s\n", |
4997 | + name, strerror(errno))); |
4998 | + goto fail; |
4999 | + } |
5000 | + } |
From IRC:
<cjwatson> bzr-git's tdb thing is used by LP
<cjwatson> for git-to-bzr code imports
<cjwatson> this doesn't mean you should block on it, since we maintain our own copies of the modules we need, just FYI on what sorts of things it's used for