Merge ~ahasenack/ubuntu/+source/tdb:disco-tdb-1.3.18 into ubuntu/+source/tdb:ubuntu/devel
- Git
- lp:~ahasenack/ubuntu/+source/tdb
- disco-tdb-1.3.18
- Merge into ubuntu/devel
Status: | Merged | ||||
---|---|---|---|---|---|
Approved by: | Andreas Hasenack | ||||
Approved revision: | cbfc32075322ceab4847ec1d185429090232c13b | ||||
Merged at revision: | cbfc32075322ceab4847ec1d185429090232c13b | ||||
Proposed branch: | ~ahasenack/ubuntu/+source/tdb:disco-tdb-1.3.18 | ||||
Merge into: | ubuntu/+source/tdb:ubuntu/devel | ||||
Diff against target: |
67291 lines (+42355/-1419) 258 files modified
ABI/tdb-1.3.17.sigs (+73/-0) ABI/tdb-1.3.18.sigs (+73/-0) Makefile (+3/-1) _tdb_text.py (+0/-1) buildtools/bin/waf (+111/-21) buildtools/examples/run_on_target.py (+1/-1) buildtools/scripts/abi_gen.sh (+1/-1) buildtools/wafsamba/configure_file.py (+4/-2) buildtools/wafsamba/generic_cc.py (+51/-52) buildtools/wafsamba/pkgconfig.py (+2/-2) buildtools/wafsamba/samba3.py (+4/-5) buildtools/wafsamba/samba_abi.py (+22/-14) buildtools/wafsamba/samba_autoconf.py (+99/-73) buildtools/wafsamba/samba_autoproto.py (+3/-3) buildtools/wafsamba/samba_bundled.py (+11/-11) buildtools/wafsamba/samba_conftests.py (+34/-31) buildtools/wafsamba/samba_cross.py (+14/-13) buildtools/wafsamba/samba_deps.py (+23/-14) buildtools/wafsamba/samba_dist.py (+42/-9) buildtools/wafsamba/samba_git.py (+1/-1) buildtools/wafsamba/samba_headers.py (+7/-6) buildtools/wafsamba/samba_install.py (+6/-5) buildtools/wafsamba/samba_patterns.py (+10/-3) buildtools/wafsamba/samba_perl.py (+8/-5) buildtools/wafsamba/samba_pidl.py (+18/-12) buildtools/wafsamba/samba_python.py (+31/-21) buildtools/wafsamba/samba_third_party.py (+7/-7) buildtools/wafsamba/samba_utils.py (+178/-111) buildtools/wafsamba/samba_version.py (+8/-5) buildtools/wafsamba/samba_waf18.py (+429/-0) buildtools/wafsamba/samba_wildcard.py (+8/-9) buildtools/wafsamba/stale_files.py (+4/-2) buildtools/wafsamba/symbols.py (+19/-17) buildtools/wafsamba/test_duplicate_symbol.sh (+1/-1) buildtools/wafsamba/tests/test_abi.py (+15/-1) buildtools/wafsamba/wafsamba.py (+48/-118) buildtools/wafsamba/wscript (+47/-58) common/dump.c (+10/-1) common/freelist.c (+34/-59) common/io.c (+2/-2) common/lock.c (+2/-2) common/open.c (+48/-55) common/summary.c (+8/-0) common/tdb.c (+170/-105) common/tdb_private.h (+11/-2) common/traverse.c (+110/-6) configure (+1/-1) debian/changelog (+15/-0) debian/libtdb-dev.install (+1/-0) debian/libtdb1.symbols (+4/-0) debian/patches/40_test_transaction_expand_non_fatal.diff (+4/-4) debian/rules (+2/-7) dev/null (+0/-129) 
include/tdb.h (+68/-1) lib/replace/Makefile (+2/-1) lib/replace/README (+1/-0) lib/replace/configure (+1/-1) lib/replace/getifaddrs.c (+1/-1) lib/replace/replace.c (+109/-0) lib/replace/replace.h (+52/-27) lib/replace/snprintf.c (+72/-72) lib/replace/system/capability.h (+2/-0) lib/replace/system/dir.h (+2/-2) lib/replace/system/filesys.h (+4/-12) lib/replace/system/gssapi.h (+6/-6) lib/replace/system/kerberos.h (+2/-2) lib/replace/system/readline.h (+1/-1) lib/replace/system/threads.h (+27/-0) lib/replace/wscript (+71/-18) pytdb.c (+34/-18) python/tdbdump.py (+1/-1) python/tests/simple.py (+1/-1) test/run-circular-chain.c (+42/-0) test/run-circular-freelist.c (+50/-0) test/run-marklock-deadlock.c (+1/-1) test/run-mutex-openflags2.c (+0/-7) test/run-traverse-chain.c (+94/-0) test/test_tdbbackup.sh (+54/-0) third_party/waf/waflib/Build.py (+1474/-0) third_party/waf/waflib/ConfigSet.py (+361/-0) third_party/waf/waflib/Configure.py (+638/-0) third_party/waf/waflib/Context.py (+737/-0) third_party/waf/waflib/Errors.py (+68/-0) third_party/waf/waflib/Logs.py (+379/-0) third_party/waf/waflib/Node.py (+970/-0) third_party/waf/waflib/Options.py (+342/-0) third_party/waf/waflib/Runner.py (+586/-0) third_party/waf/waflib/Scripting.py (+613/-0) third_party/waf/waflib/Task.py (+1281/-0) third_party/waf/waflib/TaskGen.py (+917/-0) third_party/waf/waflib/Tools/__init__.py (+1/-1) third_party/waf/waflib/Tools/ar.py (+24/-0) third_party/waf/waflib/Tools/asm.py (+73/-0) third_party/waf/waflib/Tools/bison.py (+49/-0) third_party/waf/waflib/Tools/c.py (+39/-0) third_party/waf/waflib/Tools/c_aliases.py (+144/-0) third_party/waf/waflib/Tools/c_config.py (+1352/-0) third_party/waf/waflib/Tools/c_osx.py (+193/-0) third_party/waf/waflib/Tools/c_preproc.py (+1091/-0) third_party/waf/waflib/Tools/c_tests.py (+229/-0) third_party/waf/waflib/Tools/ccroot.py (+775/-0) third_party/waf/waflib/Tools/clang.py (+29/-0) third_party/waf/waflib/Tools/clangxx.py (+30/-0) 
third_party/waf/waflib/Tools/compiler_c.py (+110/-0) third_party/waf/waflib/Tools/compiler_cxx.py (+111/-0) third_party/waf/waflib/Tools/compiler_d.py (+85/-0) third_party/waf/waflib/Tools/compiler_fc.py (+73/-0) third_party/waf/waflib/Tools/cs.py (+211/-0) third_party/waf/waflib/Tools/cxx.py (+40/-0) third_party/waf/waflib/Tools/d.py (+97/-0) third_party/waf/waflib/Tools/d_config.py (+64/-0) third_party/waf/waflib/Tools/d_scan.py (+211/-0) third_party/waf/waflib/Tools/dbus.py (+70/-0) third_party/waf/waflib/Tools/dmd.py (+80/-0) third_party/waf/waflib/Tools/errcheck.py (+237/-0) third_party/waf/waflib/Tools/fc.py (+187/-0) third_party/waf/waflib/Tools/fc_config.py (+488/-0) third_party/waf/waflib/Tools/fc_scan.py (+114/-0) third_party/waf/waflib/Tools/flex.py (+62/-0) third_party/waf/waflib/Tools/g95.py (+66/-0) third_party/waf/waflib/Tools/gas.py (+18/-0) third_party/waf/waflib/Tools/gcc.py (+156/-0) third_party/waf/waflib/Tools/gdc.py (+55/-0) third_party/waf/waflib/Tools/gfortran.py (+93/-0) third_party/waf/waflib/Tools/glib2.py (+489/-0) third_party/waf/waflib/Tools/gnu_dirs.py (+131/-0) third_party/waf/waflib/Tools/gxx.py (+157/-0) third_party/waf/waflib/Tools/icc.py (+30/-0) third_party/waf/waflib/Tools/icpc.py (+30/-0) third_party/waf/waflib/Tools/ifort.py (+413/-0) third_party/waf/waflib/Tools/intltool.py (+231/-0) third_party/waf/waflib/Tools/irixcc.py (+66/-0) third_party/waf/waflib/Tools/javaw.py (+464/-0) third_party/waf/waflib/Tools/ldc2.py (+56/-0) third_party/waf/waflib/Tools/lua.py (+38/-0) third_party/waf/waflib/Tools/md5_tstamp.py (+39/-0) third_party/waf/waflib/Tools/msvc.py (+1020/-0) third_party/waf/waflib/Tools/nasm.py (+26/-0) third_party/waf/waflib/Tools/nobuild.py (+24/-0) third_party/waf/waflib/Tools/perl.py (+156/-0) third_party/waf/waflib/Tools/python.py (+627/-0) third_party/waf/waflib/Tools/qt5.py (+796/-0) third_party/waf/waflib/Tools/ruby.py (+186/-0) third_party/waf/waflib/Tools/suncc.py (+67/-0) 
third_party/waf/waflib/Tools/suncxx.py (+67/-0) third_party/waf/waflib/Tools/tex.py (+543/-0) third_party/waf/waflib/Tools/vala.py (+355/-0) third_party/waf/waflib/Tools/waf_unit_test.py (+296/-0) third_party/waf/waflib/Tools/winres.py (+78/-0) third_party/waf/waflib/Tools/xlc.py (+65/-0) third_party/waf/waflib/Tools/xlcxx.py (+65/-0) third_party/waf/waflib/Utils.py (+1021/-0) third_party/waf/waflib/__init__.py (+1/-1) third_party/waf/waflib/ansiterm.py (+342/-0) third_party/waf/waflib/extras/__init__.py (+3/-0) third_party/waf/waflib/extras/batched_cc.py (+173/-0) third_party/waf/waflib/extras/biber.py (+58/-0) third_party/waf/waflib/extras/bjam.py (+128/-0) third_party/waf/waflib/extras/blender.py (+108/-0) third_party/waf/waflib/extras/boo.py (+81/-0) third_party/waf/waflib/extras/boost.py (+525/-0) third_party/waf/waflib/extras/build_file_tracker.py (+28/-0) third_party/waf/waflib/extras/build_logs.py (+110/-0) third_party/waf/waflib/extras/buildcopy.py (+82/-0) third_party/waf/waflib/extras/c_bgxlc.py (+32/-0) third_party/waf/waflib/extras/c_dumbpreproc.py (+72/-0) third_party/waf/waflib/extras/c_emscripten.py (+87/-0) third_party/waf/waflib/extras/c_nec.py (+74/-0) third_party/waf/waflib/extras/cabal.py (+152/-0) third_party/waf/waflib/extras/cfg_altoptions.py (+110/-0) third_party/waf/waflib/extras/clang_compilation_database.py (+85/-0) third_party/waf/waflib/extras/codelite.py (+875/-0) third_party/waf/waflib/extras/color_gcc.py (+39/-0) third_party/waf/waflib/extras/color_rvct.py (+51/-0) third_party/waf/waflib/extras/compat15.py (+406/-0) third_party/waf/waflib/extras/cppcheck.py (+585/-0) third_party/waf/waflib/extras/cpplint.py (+222/-0) third_party/waf/waflib/extras/cross_gnu.py (+227/-0) third_party/waf/waflib/extras/cython.py (+146/-0) third_party/waf/waflib/extras/dcc.py (+72/-0) third_party/waf/waflib/extras/distnet.py (+430/-0) third_party/waf/waflib/extras/doxygen.py (+227/-0) third_party/waf/waflib/extras/dpapi.py (+87/-0) 
third_party/waf/waflib/extras/eclipse.py (+431/-0) third_party/waf/waflib/extras/erlang.py (+110/-0) third_party/waf/waflib/extras/fast_partial.py (+518/-0) third_party/waf/waflib/extras/fc_bgxlf.py (+32/-0) third_party/waf/waflib/extras/fc_cray.py (+51/-0) third_party/waf/waflib/extras/fc_nag.py (+61/-0) third_party/waf/waflib/extras/fc_nec.py (+60/-0) third_party/waf/waflib/extras/fc_open64.py (+58/-0) third_party/waf/waflib/extras/fc_pgfortran.py (+68/-0) third_party/waf/waflib/extras/fc_solstudio.py (+62/-0) third_party/waf/waflib/extras/fc_xlf.py (+63/-0) third_party/waf/waflib/extras/file_to_object.py (+137/-0) third_party/waf/waflib/extras/fluid.py (+13/-9) third_party/waf/waflib/extras/freeimage.py (+74/-0) third_party/waf/waflib/extras/fsb.py (+31/-0) third_party/waf/waflib/extras/fsc.py (+64/-0) third_party/waf/waflib/extras/gccdeps.py (+214/-0) third_party/waf/waflib/extras/gdbus.py (+87/-0) third_party/waf/waflib/extras/gob2.py (+5/-5) third_party/waf/waflib/extras/halide.py (+151/-0) third_party/waf/waflib/extras/javatest.py (+118/-0) third_party/waf/waflib/extras/kde4.py (+93/-0) third_party/waf/waflib/extras/local_rpath.py (+19/-0) third_party/waf/waflib/extras/make.py (+142/-0) third_party/waf/waflib/extras/midl.py (+69/-0) third_party/waf/waflib/extras/msvcdeps.py (+256/-0) third_party/waf/waflib/extras/msvs.py (+1048/-0) third_party/waf/waflib/extras/netcache_client.py (+390/-0) third_party/waf/waflib/extras/objcopy.py (+50/-0) third_party/waf/waflib/extras/ocaml.py (+142/-91) third_party/waf/waflib/extras/package.py (+76/-0) third_party/waf/waflib/extras/parallel_debug.py (+459/-0) third_party/waf/waflib/extras/pch.py (+148/-0) third_party/waf/waflib/extras/pep8.py (+106/-0) third_party/waf/waflib/extras/pgicc.py (+75/-0) third_party/waf/waflib/extras/pgicxx.py (+20/-0) third_party/waf/waflib/extras/proc.py (+54/-0) third_party/waf/waflib/extras/protoc.py (+243/-0) third_party/waf/waflib/extras/pyqt5.py (+241/-0) 
third_party/waf/waflib/extras/pytest.py (+225/-0) third_party/waf/waflib/extras/qnxnto.py (+72/-0) third_party/waf/waflib/extras/qt4.py (+695/-0) third_party/waf/waflib/extras/relocation.py (+85/-0) third_party/waf/waflib/extras/remote.py (+327/-0) third_party/waf/waflib/extras/resx.py (+35/-0) third_party/waf/waflib/extras/review.py (+325/-0) third_party/waf/waflib/extras/rst.py (+260/-0) third_party/waf/waflib/extras/run_do_script.py (+139/-0) third_party/waf/waflib/extras/run_m_script.py (+88/-0) third_party/waf/waflib/extras/run_py_script.py (+104/-0) third_party/waf/waflib/extras/run_r_script.py (+86/-0) third_party/waf/waflib/extras/sas.py (+71/-0) third_party/waf/waflib/extras/satellite_assembly.py (+57/-0) third_party/waf/waflib/extras/scala.py (+128/-0) third_party/waf/waflib/extras/slow_qt4.py (+96/-0) third_party/waf/waflib/extras/softlink_libs.py (+76/-0) third_party/waf/waflib/extras/stale.py (+98/-0) third_party/waf/waflib/extras/stracedeps.py (+174/-0) third_party/waf/waflib/extras/swig.py (+237/-0) third_party/waf/waflib/extras/syms.py (+84/-0) third_party/waf/waflib/extras/ticgt.py (+300/-0) third_party/waf/waflib/extras/unity.py (+108/-0) third_party/waf/waflib/extras/use_config.py (+185/-0) third_party/waf/waflib/extras/valadoc.py (+127/-99) third_party/waf/waflib/extras/waf_xattr.py (+150/-0) third_party/waf/waflib/extras/why.py (+78/-0) third_party/waf/waflib/extras/win32_opts.py (+170/-0) third_party/waf/waflib/extras/wix.py (+87/-0) third_party/waf/waflib/extras/xcode6.py (+727/-0) third_party/waf/waflib/fixpy2.py (+64/-0) third_party/waf/waflib/processor.py (+64/-0) tools/tdbbackup.c (+28/-7) tools/tdbdump.c (+3/-1) tools/tdbtorture.c (+17/-6) wscript (+33/-18) |
||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Christian Ehrhardt (community) | Approve | ||
Canonical Server | Pending | ||
Review via email: mp+364086@code.launchpad.net |
Commit message
Description of the change
PPA with builds: ppa:ahasenack/
https:/
Bileto ticket:
https:/
It was green, but recent uploads removed the dep8 test history. I have to copy the packages over again.
This is part of the effort to remove python2 from the desktop iso.
That requires samba to be updated to 4.10 (bug #1818518), and that update requires a new tdb, which is what this branch is about.
We are going ahead of Debian.
Upstream switched from python 2 to using python 3 by default, so that required a few changes to the ./configure call in d/rules, and more cleanups in dh_clean.
I don't know why debian is skipping generating a symbols file for the python{,3}-tdb packages (the -N option):
override_
I also don't know why debian is manually building a static library in override_
I didn't clean up the commit tree, because there has been some back and forth here about dropping or not the py2 packages entirely. Foundations wanted the python-* (py2) packages entirely dropped, but at least python-tdb is still needed by bzr-git (Recommends). I'll keep building it, but we might be asked to drop it and somehow fix bzr-git. Anyway, I think that can be done later if needed.
Andreas Hasenack (ahasenack) wrote : | # |
Christian Ehrhardt (paelzer) wrote : | # |
probably bikeshedding, but all the changes in the changelog are "due to" the New upstream version.
I'd therefore have expected to find them indented as
* New upstream version: 1.3.18 (LP: #1818520)
- d/p/40_
fuzz
- ...
up to you
Andreas Hasenack (ahasenack) wrote : | # |
I can improve that
Christian Ehrhardt (paelzer) wrote : | # |
waf cleanup is fine, but given how much you list, don't you want to convert it to just
$ find ... -name "*.pyc" -delete
or something like that?
Considering that you also clear directories maybe:
$ find \( -name "*.pyc" -o -name "__pycache__" \) -exec rm -rf {} +
Christian Ehrhardt (paelzer) wrote : | # |
Other than the nit picks mentioned the commits LGTM, I'll go check the build log now ...
Christian Ehrhardt (paelzer) wrote : | # |
I wondered if [1] would cause trouble for the py2 packages, but after reviewing what ends up in those packages I think you are good.
[1]: https:/
Christian Ehrhardt (paelzer) wrote : | # |
I'm done parsing the upstream changes towards 1.3.18 and the build log; not much is showing up there that needs work.
The one thing I wondered is (probably unimportant):
dh_gencontrol
dpkg-gencontrol: warning: package python3-tdb: substitution variable ${python3:Versions} unused, but is defined
dpkg-gencontrol: warning: package python3-tdb: substitution variable ${python3:Versions} unused, but is defined
dpkg-gencontrol: warning: package python-tdb: substitution variable ${python:Versions} unused, but is defined
dpkg-gencontrol: warning: package python-tdb: substitution variable ${python:Versions} unused, but is defined
The field [1] is optional after all.
An entry would look like this on the py3 packages (py2 isn't needed since py2.7 is the only one)
XS-Python-Version: ${python3:Versions}
OTOH it might make future transitions easier (or harder??)
See also man dh_python3 for that.
Since I have not seen you iterating over python3 versions in d/rules it might be safer to explicitly define this to be for the current py3.7.
I have seen builds crash in -devel in times there were two py3 (e.g. 3.6&3.7) in -release, and as far as I understand the flag here it might help with that.
And adding those lines should be easier than modifying more of d/rules to build potentially multiple python modules iterating over `py3versions -vr`
[1]: https:/
Christian Ehrhardt (paelzer) wrote : | # |
All that said, nothing I found is critical - the changes LGTM.
Do you want to change the MP state to ack now or only after a re-review once you are finished?
(I must admit I squashed the commits for reviewability)
Andreas Hasenack (ahasenack) wrote : | # |
I'll look at the python issues you pointed at, and maybe use that for the other packages too. Let's leave the MP state as is for now.
Andreas Hasenack (ahasenack) wrote : | # |
Even when using X-Python3-Version: ${python3:Versions} in d/control, I still get the warning, specifically in the binary packages. I'll remove it for now, pending a better understanding of how this works.
Andreas Hasenack (ahasenack) wrote : | # |
- changelog updated
- I also squashed the revert commit I had
tdb will build py2 and py3 packages. py2 because of bzr-git, even though the FFe bug said it would have been ok to drop python-tdb (py2). We can still do that later.
I'll move on to the other MPs now, get all consistent.
Andreas Hasenack (ahasenack) wrote : | # |
Tagged and uploaded:
$ git push pkg upload/
Enumerating objects: 395, done.
Counting objects: 100% (395/395), done.
Delta compression using up to 4 threads
Compressing objects: 100% (208/208), done.
Writing objects: 100% (301/301), 438.24 KiB | 245.00 KiB/s, done.
Total 301 (delta 101), reused 277 (delta 86)
To ssh://git.
* [new tag] upload/
$ dput ubuntu ../tdb_
Checking signature on .changes
gpg: ../tdb_
Checking signature on .dsc
gpg: ../tdb_
Uploading to ubuntu (via ftp to upload.ubuntu.com):
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Successfully uploaded packages.
Andreas Hasenack (ahasenack) wrote : | # |
Now uploading with the orig tarball included:
$ dput -f ubuntu ../tdb_
Checking signature on .changes
gpg: ../tdb_
Checking signature on .dsc
gpg: ../tdb_
Uploading to ubuntu (via ftp to upload.ubuntu.com):
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Uploading tdb_1.3.
Successfully uploaded packages.
Preview Diff
1 | diff --git a/ABI/tdb-1.3.17.sigs b/ABI/tdb-1.3.17.sigs | |||
2 | 0 | new file mode 100644 | 0 | new file mode 100644 |
3 | index 0000000..e2b0427 | |||
4 | --- /dev/null | |||
5 | +++ b/ABI/tdb-1.3.17.sigs | |||
6 | @@ -0,0 +1,73 @@ | |||
7 | 1 | tdb_add_flags: void (struct tdb_context *, unsigned int) | ||
8 | 2 | tdb_append: int (struct tdb_context *, TDB_DATA, TDB_DATA) | ||
9 | 3 | tdb_chainlock: int (struct tdb_context *, TDB_DATA) | ||
10 | 4 | tdb_chainlock_mark: int (struct tdb_context *, TDB_DATA) | ||
11 | 5 | tdb_chainlock_nonblock: int (struct tdb_context *, TDB_DATA) | ||
12 | 6 | tdb_chainlock_read: int (struct tdb_context *, TDB_DATA) | ||
13 | 7 | tdb_chainlock_read_nonblock: int (struct tdb_context *, TDB_DATA) | ||
14 | 8 | tdb_chainlock_unmark: int (struct tdb_context *, TDB_DATA) | ||
15 | 9 | tdb_chainunlock: int (struct tdb_context *, TDB_DATA) | ||
16 | 10 | tdb_chainunlock_read: int (struct tdb_context *, TDB_DATA) | ||
17 | 11 | tdb_check: int (struct tdb_context *, int (*)(TDB_DATA, TDB_DATA, void *), void *) | ||
18 | 12 | tdb_close: int (struct tdb_context *) | ||
19 | 13 | tdb_delete: int (struct tdb_context *, TDB_DATA) | ||
20 | 14 | tdb_dump_all: void (struct tdb_context *) | ||
21 | 15 | tdb_enable_seqnum: void (struct tdb_context *) | ||
22 | 16 | tdb_error: enum TDB_ERROR (struct tdb_context *) | ||
23 | 17 | tdb_errorstr: const char *(struct tdb_context *) | ||
24 | 18 | tdb_exists: int (struct tdb_context *, TDB_DATA) | ||
25 | 19 | tdb_fd: int (struct tdb_context *) | ||
26 | 20 | tdb_fetch: TDB_DATA (struct tdb_context *, TDB_DATA) | ||
27 | 21 | tdb_firstkey: TDB_DATA (struct tdb_context *) | ||
28 | 22 | tdb_freelist_size: int (struct tdb_context *) | ||
29 | 23 | tdb_get_flags: int (struct tdb_context *) | ||
30 | 24 | tdb_get_logging_private: void *(struct tdb_context *) | ||
31 | 25 | tdb_get_seqnum: int (struct tdb_context *) | ||
32 | 26 | tdb_hash_size: int (struct tdb_context *) | ||
33 | 27 | tdb_increment_seqnum_nonblock: void (struct tdb_context *) | ||
34 | 28 | tdb_jenkins_hash: unsigned int (TDB_DATA *) | ||
35 | 29 | tdb_lock_nonblock: int (struct tdb_context *, int, int) | ||
36 | 30 | tdb_lockall: int (struct tdb_context *) | ||
37 | 31 | tdb_lockall_mark: int (struct tdb_context *) | ||
38 | 32 | tdb_lockall_nonblock: int (struct tdb_context *) | ||
39 | 33 | tdb_lockall_read: int (struct tdb_context *) | ||
40 | 34 | tdb_lockall_read_nonblock: int (struct tdb_context *) | ||
41 | 35 | tdb_lockall_unmark: int (struct tdb_context *) | ||
42 | 36 | tdb_log_fn: tdb_log_func (struct tdb_context *) | ||
43 | 37 | tdb_map_size: size_t (struct tdb_context *) | ||
44 | 38 | tdb_name: const char *(struct tdb_context *) | ||
45 | 39 | tdb_nextkey: TDB_DATA (struct tdb_context *, TDB_DATA) | ||
46 | 40 | tdb_null: dptr = 0xXXXX, dsize = 0 | ||
47 | 41 | tdb_open: struct tdb_context *(const char *, int, int, int, mode_t) | ||
48 | 42 | tdb_open_ex: struct tdb_context *(const char *, int, int, int, mode_t, const struct tdb_logging_context *, tdb_hash_func) | ||
49 | 43 | tdb_parse_record: int (struct tdb_context *, TDB_DATA, int (*)(TDB_DATA, TDB_DATA, void *), void *) | ||
50 | 44 | tdb_printfreelist: int (struct tdb_context *) | ||
51 | 45 | tdb_remove_flags: void (struct tdb_context *, unsigned int) | ||
52 | 46 | tdb_reopen: int (struct tdb_context *) | ||
53 | 47 | tdb_reopen_all: int (int) | ||
54 | 48 | tdb_repack: int (struct tdb_context *) | ||
55 | 49 | tdb_rescue: int (struct tdb_context *, void (*)(TDB_DATA, TDB_DATA, void *), void *) | ||
56 | 50 | tdb_runtime_check_for_robust_mutexes: bool (void) | ||
57 | 51 | tdb_set_logging_function: void (struct tdb_context *, const struct tdb_logging_context *) | ||
58 | 52 | tdb_set_max_dead: void (struct tdb_context *, int) | ||
59 | 53 | tdb_setalarm_sigptr: void (struct tdb_context *, volatile sig_atomic_t *) | ||
60 | 54 | tdb_store: int (struct tdb_context *, TDB_DATA, TDB_DATA, int) | ||
61 | 55 | tdb_storev: int (struct tdb_context *, TDB_DATA, const TDB_DATA *, int, int) | ||
62 | 56 | tdb_summary: char *(struct tdb_context *) | ||
63 | 57 | tdb_transaction_active: bool (struct tdb_context *) | ||
64 | 58 | tdb_transaction_cancel: int (struct tdb_context *) | ||
65 | 59 | tdb_transaction_commit: int (struct tdb_context *) | ||
66 | 60 | tdb_transaction_prepare_commit: int (struct tdb_context *) | ||
67 | 61 | tdb_transaction_start: int (struct tdb_context *) | ||
68 | 62 | tdb_transaction_start_nonblock: int (struct tdb_context *) | ||
69 | 63 | tdb_transaction_write_lock_mark: int (struct tdb_context *) | ||
70 | 64 | tdb_transaction_write_lock_unmark: int (struct tdb_context *) | ||
71 | 65 | tdb_traverse: int (struct tdb_context *, tdb_traverse_func, void *) | ||
72 | 66 | tdb_traverse_chain: int (struct tdb_context *, unsigned int, tdb_traverse_func, void *) | ||
73 | 67 | tdb_traverse_key_chain: int (struct tdb_context *, TDB_DATA, tdb_traverse_func, void *) | ||
74 | 68 | tdb_traverse_read: int (struct tdb_context *, tdb_traverse_func, void *) | ||
75 | 69 | tdb_unlock: int (struct tdb_context *, int, int) | ||
76 | 70 | tdb_unlockall: int (struct tdb_context *) | ||
77 | 71 | tdb_unlockall_read: int (struct tdb_context *) | ||
78 | 72 | tdb_validate_freelist: int (struct tdb_context *, int *) | ||
79 | 73 | tdb_wipe_all: int (struct tdb_context *) | ||
80 | diff --git a/ABI/tdb-1.3.18.sigs b/ABI/tdb-1.3.18.sigs | |||
81 | 0 | new file mode 100644 | 74 | new file mode 100644 |
82 | index 0000000..e2b0427 | |||
83 | --- /dev/null | |||
84 | +++ b/ABI/tdb-1.3.18.sigs | |||
85 | @@ -0,0 +1,73 @@ | |||
86 | 1 | tdb_add_flags: void (struct tdb_context *, unsigned int) | ||
87 | 2 | tdb_append: int (struct tdb_context *, TDB_DATA, TDB_DATA) | ||
88 | 3 | tdb_chainlock: int (struct tdb_context *, TDB_DATA) | ||
89 | 4 | tdb_chainlock_mark: int (struct tdb_context *, TDB_DATA) | ||
90 | 5 | tdb_chainlock_nonblock: int (struct tdb_context *, TDB_DATA) | ||
91 | 6 | tdb_chainlock_read: int (struct tdb_context *, TDB_DATA) | ||
92 | 7 | tdb_chainlock_read_nonblock: int (struct tdb_context *, TDB_DATA) | ||
93 | 8 | tdb_chainlock_unmark: int (struct tdb_context *, TDB_DATA) | ||
94 | 9 | tdb_chainunlock: int (struct tdb_context *, TDB_DATA) | ||
95 | 10 | tdb_chainunlock_read: int (struct tdb_context *, TDB_DATA) | ||
96 | 11 | tdb_check: int (struct tdb_context *, int (*)(TDB_DATA, TDB_DATA, void *), void *) | ||
97 | 12 | tdb_close: int (struct tdb_context *) | ||
98 | 13 | tdb_delete: int (struct tdb_context *, TDB_DATA) | ||
99 | 14 | tdb_dump_all: void (struct tdb_context *) | ||
100 | 15 | tdb_enable_seqnum: void (struct tdb_context *) | ||
101 | 16 | tdb_error: enum TDB_ERROR (struct tdb_context *) | ||
102 | 17 | tdb_errorstr: const char *(struct tdb_context *) | ||
103 | 18 | tdb_exists: int (struct tdb_context *, TDB_DATA) | ||
104 | 19 | tdb_fd: int (struct tdb_context *) | ||
105 | 20 | tdb_fetch: TDB_DATA (struct tdb_context *, TDB_DATA) | ||
106 | 21 | tdb_firstkey: TDB_DATA (struct tdb_context *) | ||
107 | 22 | tdb_freelist_size: int (struct tdb_context *) | ||
108 | 23 | tdb_get_flags: int (struct tdb_context *) | ||
109 | 24 | tdb_get_logging_private: void *(struct tdb_context *) | ||
110 | 25 | tdb_get_seqnum: int (struct tdb_context *) | ||
111 | 26 | tdb_hash_size: int (struct tdb_context *) | ||
112 | 27 | tdb_increment_seqnum_nonblock: void (struct tdb_context *) | ||
113 | 28 | tdb_jenkins_hash: unsigned int (TDB_DATA *) | ||
114 | 29 | tdb_lock_nonblock: int (struct tdb_context *, int, int) | ||
115 | 30 | tdb_lockall: int (struct tdb_context *) | ||
116 | 31 | tdb_lockall_mark: int (struct tdb_context *) | ||
117 | 32 | tdb_lockall_nonblock: int (struct tdb_context *) | ||
118 | 33 | tdb_lockall_read: int (struct tdb_context *) | ||
119 | 34 | tdb_lockall_read_nonblock: int (struct tdb_context *) | ||
120 | 35 | tdb_lockall_unmark: int (struct tdb_context *) | ||
121 | 36 | tdb_log_fn: tdb_log_func (struct tdb_context *) | ||
122 | 37 | tdb_map_size: size_t (struct tdb_context *) | ||
123 | 38 | tdb_name: const char *(struct tdb_context *) | ||
124 | 39 | tdb_nextkey: TDB_DATA (struct tdb_context *, TDB_DATA) | ||
125 | 40 | tdb_null: dptr = 0xXXXX, dsize = 0 | ||
126 | 41 | tdb_open: struct tdb_context *(const char *, int, int, int, mode_t) | ||
127 | 42 | tdb_open_ex: struct tdb_context *(const char *, int, int, int, mode_t, const struct tdb_logging_context *, tdb_hash_func) | ||
128 | 43 | tdb_parse_record: int (struct tdb_context *, TDB_DATA, int (*)(TDB_DATA, TDB_DATA, void *), void *) | ||
129 | 44 | tdb_printfreelist: int (struct tdb_context *) | ||
130 | 45 | tdb_remove_flags: void (struct tdb_context *, unsigned int) | ||
131 | 46 | tdb_reopen: int (struct tdb_context *) | ||
132 | 47 | tdb_reopen_all: int (int) | ||
133 | 48 | tdb_repack: int (struct tdb_context *) | ||
134 | 49 | tdb_rescue: int (struct tdb_context *, void (*)(TDB_DATA, TDB_DATA, void *), void *) | ||
135 | 50 | tdb_runtime_check_for_robust_mutexes: bool (void) | ||
136 | 51 | tdb_set_logging_function: void (struct tdb_context *, const struct tdb_logging_context *) | ||
137 | 52 | tdb_set_max_dead: void (struct tdb_context *, int) | ||
138 | 53 | tdb_setalarm_sigptr: void (struct tdb_context *, volatile sig_atomic_t *) | ||
139 | 54 | tdb_store: int (struct tdb_context *, TDB_DATA, TDB_DATA, int) | ||
140 | 55 | tdb_storev: int (struct tdb_context *, TDB_DATA, const TDB_DATA *, int, int) | ||
141 | 56 | tdb_summary: char *(struct tdb_context *) | ||
142 | 57 | tdb_transaction_active: bool (struct tdb_context *) | ||
143 | 58 | tdb_transaction_cancel: int (struct tdb_context *) | ||
144 | 59 | tdb_transaction_commit: int (struct tdb_context *) | ||
145 | 60 | tdb_transaction_prepare_commit: int (struct tdb_context *) | ||
146 | 61 | tdb_transaction_start: int (struct tdb_context *) | ||
147 | 62 | tdb_transaction_start_nonblock: int (struct tdb_context *) | ||
148 | 63 | tdb_transaction_write_lock_mark: int (struct tdb_context *) | ||
149 | 64 | tdb_transaction_write_lock_unmark: int (struct tdb_context *) | ||
150 | 65 | tdb_traverse: int (struct tdb_context *, tdb_traverse_func, void *) | ||
151 | 66 | tdb_traverse_chain: int (struct tdb_context *, unsigned int, tdb_traverse_func, void *) | ||
152 | 67 | tdb_traverse_key_chain: int (struct tdb_context *, TDB_DATA, tdb_traverse_func, void *) | ||
153 | 68 | tdb_traverse_read: int (struct tdb_context *, tdb_traverse_func, void *) | ||
154 | 69 | tdb_unlock: int (struct tdb_context *, int, int) | ||
155 | 70 | tdb_unlockall: int (struct tdb_context *) | ||
156 | 71 | tdb_unlockall_read: int (struct tdb_context *) | ||
157 | 72 | tdb_validate_freelist: int (struct tdb_context *, int *) | ||
158 | 73 | tdb_wipe_all: int (struct tdb_context *) | ||
159 | diff --git a/Makefile b/Makefile | |||
160 | index fe44ff6..8fd56c8 100644 | |||
161 | --- a/Makefile | |||
162 | +++ b/Makefile | |||
163 | @@ -1,6 +1,8 @@ | |||
164 | 1 | # simple makefile wrapper to run waf | 1 | # simple makefile wrapper to run waf |
165 | 2 | 2 | ||
167 | 3 | WAF=WAF_MAKE=1 PATH=buildtools/bin:../../buildtools/bin:$$PATH waf | 3 | WAF_BIN=`PATH=buildtools/bin:../../buildtools/bin:$$PATH which waf` |
168 | 4 | WAF_BINARY=$(PYTHON) $(WAF_BIN) | ||
169 | 5 | WAF=PYTHONHASHSEED=1 WAF_MAKE=1 $(WAF_BINARY) | ||
170 | 4 | 6 | ||
171 | 5 | all: | 7 | all: |
172 | 6 | $(WAF) build | 8 | $(WAF) build |
173 | diff --git a/_tdb_text.py b/_tdb_text.py | |||
174 | index c823bf8..f3caa53 100644 | |||
175 | --- a/_tdb_text.py | |||
176 | +++ b/_tdb_text.py | |||
177 | @@ -4,7 +4,6 @@ | |||
178 | 4 | # Published under the GNU LGPLv3 or later | 4 | # Published under the GNU LGPLv3 or later |
179 | 5 | 5 | ||
180 | 6 | import sys | 6 | import sys |
181 | 7 | import functools | ||
182 | 8 | 7 | ||
183 | 9 | import tdb | 8 | import tdb |
184 | 10 | 9 | ||
185 | diff --git a/buildtools/bin/waf b/buildtools/bin/waf | |||
186 | index 1b0f466..3ee4d5b 100755 | |||
187 | --- a/buildtools/bin/waf | |||
188 | +++ b/buildtools/bin/waf | |||
189 | @@ -1,7 +1,7 @@ | |||
194 | 1 | #!/usr/bin/env python | 1 | #!/usr/bin/env python3 |
195 | 2 | # encoding: ISO-8859-1 | 2 | # encoding: latin-1 |
196 | 3 | # Thomas Nagy, 2005-2010 | 3 | # Thomas Nagy, 2005-2018 |
197 | 4 | 4 | # | |
198 | 5 | """ | 5 | """ |
199 | 6 | Redistribution and use in source and binary forms, with or without | 6 | Redistribution and use in source and binary forms, with or without |
200 | 7 | modification, are permitted provided that the following conditions | 7 | modification, are permitted provided that the following conditions |
201 | @@ -30,25 +30,24 @@ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | |||
202 | 30 | POSSIBILITY OF SUCH DAMAGE. | 30 | POSSIBILITY OF SUCH DAMAGE. |
203 | 31 | """ | 31 | """ |
204 | 32 | 32 | ||
211 | 33 | import os, sys | 33 | import os, sys, inspect |
206 | 34 | if sys.hexversion<0x203000f: raise ImportError("Waf requires Python >= 2.3") | ||
207 | 35 | |||
208 | 36 | if 'PSYCOWAF' in os.environ: | ||
209 | 37 | try:import psyco;psyco.full() | ||
210 | 38 | except:pass | ||
212 | 39 | 34 | ||
214 | 40 | VERSION="1.5.19" | 35 | VERSION="2.0.8" |
215 | 41 | REVISION="x" | 36 | REVISION="x" |
216 | 37 | GIT="x" | ||
217 | 42 | INSTALL="x" | 38 | INSTALL="x" |
218 | 43 | C1='x' | 39 | C1='x' |
219 | 44 | C2='x' | 40 | C2='x' |
220 | 41 | C3='x' | ||
221 | 45 | cwd = os.getcwd() | 42 | cwd = os.getcwd() |
222 | 46 | join = os.path.join | 43 | join = os.path.join |
223 | 47 | 44 | ||
224 | 45 | if sys.hexversion<0x206000f: | ||
225 | 46 | raise ImportError('Python >= 2.6 is required to create the waf file') | ||
226 | 47 | |||
227 | 48 | WAF='waf' | 48 | WAF='waf' |
228 | 49 | def b(x): | 49 | def b(x): |
229 | 50 | return x | 50 | return x |
230 | 51 | |||
231 | 52 | if sys.hexversion>0x300000f: | 51 | if sys.hexversion>0x300000f: |
232 | 53 | WAF='waf3' | 52 | WAF='waf3' |
233 | 54 | def b(x): | 53 | def b(x): |
234 | @@ -58,20 +57,111 @@ def err(m): | |||
235 | 58 | print(('\033[91mError: %s\033[0m' % m)) | 57 | print(('\033[91mError: %s\033[0m' % m)) |
236 | 59 | sys.exit(1) | 58 | sys.exit(1) |
237 | 60 | 59 | ||
240 | 61 | def test(dir): | 60 | def unpack_wafdir(dir, src): |
241 | 62 | try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir) | 61 | f = open(src,'rb') |
242 | 62 | c = 'corrupt archive (%d)' | ||
243 | 63 | while 1: | ||
244 | 64 | line = f.readline() | ||
245 | 65 | if not line: err('run waf-light from a folder containing waflib') | ||
246 | 66 | if line == b('#==>\n'): | ||
247 | 67 | txt = f.readline() | ||
248 | 68 | if not txt: err(c % 1) | ||
249 | 69 | if f.readline() != b('#<==\n'): err(c % 2) | ||
250 | 70 | break | ||
251 | 71 | if not txt: err(c % 3) | ||
252 | 72 | txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00')) | ||
253 | 73 | |||
254 | 74 | import shutil, tarfile | ||
255 | 75 | try: shutil.rmtree(dir) | ||
256 | 63 | except OSError: pass | 76 | except OSError: pass |
257 | 77 | try: | ||
258 | 78 | for x in ('Tools', 'extras'): | ||
259 | 79 | os.makedirs(join(dir, 'waflib', x)) | ||
260 | 80 | except OSError: | ||
261 | 81 | err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir) | ||
262 | 82 | |||
263 | 83 | os.chdir(dir) | ||
264 | 84 | tmp = 't.bz2' | ||
265 | 85 | t = open(tmp,'wb') | ||
266 | 86 | try: t.write(txt) | ||
267 | 87 | finally: t.close() | ||
268 | 88 | |||
269 | 89 | try: | ||
270 | 90 | t = tarfile.open(tmp) | ||
271 | 91 | except: | ||
272 | 92 | try: | ||
273 | 93 | os.system('bunzip2 t.bz2') | ||
274 | 94 | t = tarfile.open('t') | ||
275 | 95 | tmp = 't' | ||
276 | 96 | except: | ||
277 | 97 | os.chdir(cwd) | ||
278 | 98 | try: shutil.rmtree(dir) | ||
279 | 99 | except OSError: pass | ||
280 | 100 | err("Waf cannot be unpacked, check that bzip2 support is present") | ||
281 | 101 | |||
282 | 102 | try: | ||
283 | 103 | for x in t: t.extract(x) | ||
284 | 104 | finally: | ||
285 | 105 | t.close() | ||
286 | 106 | |||
287 | 107 | for x in ('Tools', 'extras'): | ||
288 | 108 | os.chmod(join('waflib',x), 493) | ||
289 | 109 | |||
290 | 110 | if sys.hexversion<0x300000f: | ||
291 | 111 | sys.path = [join(dir, 'waflib')] + sys.path | ||
292 | 112 | import fixpy2 | ||
293 | 113 | fixpy2.fixdir(dir) | ||
294 | 114 | |||
295 | 115 | os.remove(tmp) | ||
296 | 116 | os.chdir(cwd) | ||
297 | 117 | |||
298 | 118 | try: dir = unicode(dir, 'mbcs') | ||
299 | 119 | except: pass | ||
300 | 120 | try: | ||
301 | 121 | from ctypes import windll | ||
302 | 122 | windll.kernel32.SetFileAttributesW(dir, 2) | ||
303 | 123 | except: | ||
304 | 124 | pass | ||
305 | 125 | |||
306 | 126 | def test(dir): | ||
307 | 127 | try: | ||
308 | 128 | os.stat(join(dir, 'waflib')) | ||
309 | 129 | return os.path.abspath(dir) | ||
310 | 130 | except OSError: | ||
311 | 131 | pass | ||
312 | 64 | 132 | ||
313 | 65 | def find_lib(): | 133 | def find_lib(): |
315 | 66 | return os.path.abspath(os.path.join(os.path.dirname(__file__), '../../third_party/waf')) | 134 | path = '../../third_party/waf' |
316 | 135 | paths = [path, path+'/waflib'] | ||
317 | 136 | return [os.path.abspath(os.path.join(os.path.dirname(__file__), x)) for x in paths] | ||
318 | 67 | 137 | ||
319 | 68 | wafdir = find_lib() | 138 | wafdir = find_lib() |
324 | 69 | w = join(wafdir, 'wafadmin') | 139 | for p in wafdir: |
325 | 70 | t = join(w, 'Tools') | 140 | sys.path.insert(0, p) |
322 | 71 | f = join(w, '3rdparty') | ||
323 | 72 | sys.path = [w, t, f] + sys.path | ||
326 | 73 | 141 | ||
327 | 74 | if __name__ == '__main__': | 142 | if __name__ == '__main__': |
330 | 75 | import Scripting | 143 | #import extras.compat15#PRELUDE |
331 | 76 | Scripting.prepare(t, cwd, VERSION, wafdir) | 144 | import sys |
332 | 145 | |||
333 | 146 | from waflib.Tools import ccroot, c, ar, compiler_c, gcc | ||
334 | 147 | sys.modules['cc'] = c | ||
335 | 148 | sys.modules['ccroot'] = ccroot | ||
336 | 149 | sys.modules['ar'] = ar | ||
337 | 150 | sys.modules['compiler_cc'] = compiler_c | ||
338 | 151 | sys.modules['gcc'] = gcc | ||
339 | 152 | |||
340 | 153 | from waflib import Options | ||
341 | 154 | Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript') | ||
342 | 155 | if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0: | ||
343 | 156 | os.environ['NOCLIMB'] = "1" | ||
344 | 157 | # there is a single top-level, but libraries must build independently | ||
345 | 158 | os.environ['NO_LOCK_IN_TOP'] = "1" | ||
346 | 159 | |||
347 | 160 | from waflib import Task | ||
348 | 161 | class o(object): | ||
349 | 162 | display = None | ||
350 | 163 | Task.classes['cc_link'] = o | ||
351 | 164 | |||
352 | 165 | from waflib import Scripting | ||
353 | 166 | Scripting.waf_entry_point(cwd, VERSION, wafdir[0]) | ||
354 | 77 | 167 | ||
355 | diff --git a/buildtools/examples/run_on_target.py b/buildtools/examples/run_on_target.py | |||
356 | index 8322759..79c5730 100755 | |||
357 | --- a/buildtools/examples/run_on_target.py | |||
358 | +++ b/buildtools/examples/run_on_target.py | |||
359 | @@ -1,4 +1,4 @@ | |||
361 | 1 | #!/usr/bin/env python | 1 | #!/usr/bin/env python3 |
362 | 2 | 2 | ||
363 | 3 | # | 3 | # |
364 | 4 | # Sample run-on-target script | 4 | # Sample run-on-target script |
365 | diff --git a/buildtools/scripts/abi_gen.sh b/buildtools/scripts/abi_gen.sh | |||
366 | index 787718c..6dd6d32 100755 | |||
367 | --- a/buildtools/scripts/abi_gen.sh | |||
368 | +++ b/buildtools/scripts/abi_gen.sh | |||
369 | @@ -17,5 +17,5 @@ done | |||
370 | 17 | ) > $GDBSCRIPT | 17 | ) > $GDBSCRIPT |
371 | 18 | 18 | ||
372 | 19 | # forcing the terminal avoids a problem on Fedora12 | 19 | # forcing the terminal avoids a problem on Fedora12 |
374 | 20 | TERM=none gdb -batch -x $GDBSCRIPT "$SHAREDLIB" < /dev/null | 20 | TERM=none gdb -n -batch -x $GDBSCRIPT "$SHAREDLIB" < /dev/null |
375 | 21 | rm -f $GDBSCRIPT | 21 | rm -f $GDBSCRIPT |
376 | diff --git a/buildtools/wafsamba/configure_file.py b/buildtools/wafsamba/configure_file.py | |||
377 | index e28282b..6ad4354 100644 | |||
378 | --- a/buildtools/wafsamba/configure_file.py | |||
379 | +++ b/buildtools/wafsamba/configure_file.py | |||
380 | @@ -1,7 +1,9 @@ | |||
381 | 1 | # handle substitution of variables in .in files | 1 | # handle substitution of variables in .in files |
382 | 2 | 2 | ||
385 | 3 | import re, os | 3 | import sys |
386 | 4 | import Build, sys, Logs | 4 | import re |
387 | 5 | import os | ||
388 | 6 | from waflib import Build, Logs | ||
389 | 5 | from samba_utils import SUBST_VARS_RECURSIVE | 7 | from samba_utils import SUBST_VARS_RECURSIVE |
390 | 6 | 8 | ||
391 | 7 | def subst_at_vars(task): | 9 | def subst_at_vars(task): |
392 | diff --git a/buildtools/wafsamba/generic_cc.py b/buildtools/wafsamba/generic_cc.py | |||
393 | index 504e902..1352c54 100644 | |||
394 | --- a/buildtools/wafsamba/generic_cc.py | |||
395 | +++ b/buildtools/wafsamba/generic_cc.py | |||
396 | @@ -3,69 +3,68 @@ | |||
397 | 3 | # based on suncc.py from waf | 3 | # based on suncc.py from waf |
398 | 4 | 4 | ||
399 | 5 | import os, optparse | 5 | import os, optparse |
403 | 6 | import Utils, Options, Configure | 6 | from waflib import Errors |
404 | 7 | import ccroot, ar | 7 | from waflib.Tools import ccroot, ar |
405 | 8 | from Configure import conftest | 8 | from waflib.Configure import conf |
406 | 9 | 9 | ||
408 | 10 | from compiler_cc import c_compiler | 10 | # |
409 | 11 | # Let waflib provide useful defaults, but | ||
410 | 12 | # provide generic_cc as last resort fallback on | ||
411 | 13 | # all platforms | ||
412 | 14 | # | ||
413 | 15 | from waflib.Tools.compiler_c import c_compiler | ||
414 | 16 | for key in c_compiler.keys(): | ||
415 | 17 | c_compiler[key].append('generic_cc') | ||
416 | 11 | 18 | ||
421 | 12 | c_compiler['default'] = ['gcc', 'generic_cc'] | 19 | @conf |
418 | 13 | c_compiler['hpux'] = ['gcc', 'generic_cc'] | ||
419 | 14 | |||
420 | 15 | @conftest | ||
422 | 16 | def find_generic_cc(conf): | 20 | def find_generic_cc(conf): |
423 | 17 | v = conf.env | 21 | v = conf.env |
424 | 18 | cc = None | 22 | cc = None |
436 | 19 | if v['CC']: cc = v['CC'] | 23 | if v.CC: |
437 | 20 | elif 'CC' in conf.environ: cc = conf.environ['CC'] | 24 | cc = v.CC |
438 | 21 | if not cc: cc = conf.find_program('cc', var='CC') | 25 | elif 'CC' in conf.environ: |
439 | 22 | if not cc: conf.fatal('generic_cc was not found') | 26 | cc = conf.environ['CC'] |
440 | 23 | cc = conf.cmd_to_list(cc) | 27 | if not cc: |
441 | 24 | v['CC'] = cc | 28 | cc = conf.find_program('cc', var='CC') |
442 | 25 | v['CC_NAME'] = 'generic' | 29 | if not cc: |
443 | 26 | 30 | conf.fatal('generic_cc was not found') | |
433 | 27 | @conftest | ||
434 | 28 | def generic_cc_common_flags(conf): | ||
435 | 29 | v = conf.env | ||
444 | 30 | 31 | ||
448 | 31 | v['CC_SRC_F'] = '' | 32 | try: |
449 | 32 | v['CC_TGT_F'] = ['-c', '-o', ''] | 33 | conf.cmd_and_log(cc + ['--version']) |
450 | 33 | v['CPPPATH_ST'] = '-I%s' # template for adding include paths | 34 | except Errors.WafError: |
451 | 35 | conf.fatal('%r --version could not be executed' % cc) | ||
452 | 34 | 36 | ||
457 | 35 | # linker | 37 | v.CC = cc |
458 | 36 | if not v['LINK_CC']: v['LINK_CC'] = v['CC'] | 38 | v.CC_NAME = 'generic_cc' |
455 | 37 | v['CCLNK_SRC_F'] = '' | ||
456 | 38 | v['CCLNK_TGT_F'] = ['-o', ''] | ||
459 | 39 | 39 | ||
465 | 40 | v['LIB_ST'] = '-l%s' # template for adding libs | 40 | @conf |
466 | 41 | v['LIBPATH_ST'] = '-L%s' # template for adding libpaths | 41 | def generic_cc_common_flags(conf): |
467 | 42 | v['STATICLIB_ST'] = '-l%s' | 42 | v = conf.env |
463 | 43 | v['STATICLIBPATH_ST'] = '-L%s' | ||
464 | 44 | v['CCDEFINES_ST'] = '-D%s' | ||
468 | 45 | 43 | ||
472 | 46 | # v['SONAME_ST'] = '-Wl,-h -Wl,%s' | 44 | v.CC_SRC_F = '' |
473 | 47 | # v['SHLIB_MARKER'] = '-Bdynamic' | 45 | v.CC_TGT_F = ['-c', '-o'] |
474 | 48 | # v['STATICLIB_MARKER'] = '-Bstatic' | 46 | v.CPPPATH_ST = '-I%s' |
475 | 47 | v.DEFINES_ST = '-D%s' | ||
476 | 49 | 48 | ||
479 | 50 | # program | 49 | if not v.LINK_CC: |
480 | 51 | v['program_PATTERN'] = '%s' | 50 | v.LINK_CC = v.CC |
481 | 52 | 51 | ||
486 | 53 | # shared library | 52 | v.CCLNK_SRC_F = '' |
487 | 54 | # v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] | 53 | v.CCLNK_TGT_F = ['-o'] |
484 | 55 | # v['shlib_LINKFLAGS'] = ['-G'] | ||
485 | 56 | v['shlib_PATTERN'] = 'lib%s.so' | ||
488 | 57 | 54 | ||
492 | 58 | # static lib | 55 | v.LIB_ST = '-l%s' # template for adding libs |
493 | 59 | # v['staticlib_LINKFLAGS'] = ['-Bstatic'] | 56 | v.LIBPATH_ST = '-L%s' # template for adding libpaths |
494 | 60 | # v['staticlib_PATTERN'] = 'lib%s.a' | 57 | v.STLIB_ST = '-l%s' |
495 | 58 | v.STLIBPATH_ST = '-L%s' | ||
496 | 61 | 59 | ||
506 | 62 | detect = ''' | 60 | v.cprogram_PATTERN = '%s' |
507 | 63 | find_generic_cc | 61 | v.cshlib_PATTERN = 'lib%s.so' |
508 | 64 | find_cpp | 62 | v.cstlib_PATTERN = 'lib%s.a' |
500 | 65 | find_ar | ||
501 | 66 | generic_cc_common_flags | ||
502 | 67 | cc_load_tools | ||
503 | 68 | cc_add_flags | ||
504 | 69 | link_add_flags | ||
505 | 70 | ''' | ||
509 | 71 | 63 | ||
510 | 64 | def configure(conf): | ||
511 | 65 | conf.find_generic_cc() | ||
512 | 66 | conf.find_ar() | ||
513 | 67 | conf.generic_cc_common_flags() | ||
514 | 68 | conf.cc_load_tools() | ||
515 | 69 | conf.cc_add_flags() | ||
516 | 70 | conf.link_add_flags() | ||
517 | diff --git a/buildtools/wafsamba/hpuxcc.py b/buildtools/wafsamba/hpuxcc.py | |||
518 | 72 | deleted file mode 100644 | 71 | deleted file mode 100644 |
519 | index c263556..0000000 | |||
520 | --- a/buildtools/wafsamba/hpuxcc.py | |||
521 | +++ /dev/null | |||
522 | @@ -1,56 +0,0 @@ | |||
523 | 1 | # compiler definition for HPUX | ||
524 | 2 | # based on suncc.py from waf | ||
525 | 3 | |||
526 | 4 | import os, optparse, sys | ||
527 | 5 | import Utils, Options, Configure | ||
528 | 6 | import ccroot, ar | ||
529 | 7 | from Configure import conftest | ||
530 | 8 | import gcc | ||
531 | 9 | |||
532 | 10 | |||
533 | 11 | @conftest | ||
534 | 12 | def gcc_modifier_hpux(conf): | ||
535 | 13 | v=conf.env | ||
536 | 14 | v['CCFLAGS_DEBUG']=['-g'] | ||
537 | 15 | v['CCFLAGS_RELEASE']=['-O2'] | ||
538 | 16 | v['CC_SRC_F']='' | ||
539 | 17 | v['CC_TGT_F']=['-c','-o',''] | ||
540 | 18 | v['CPPPATH_ST']='-I%s' | ||
541 | 19 | if not v['LINK_CC']:v['LINK_CC']=v['CC'] | ||
542 | 20 | v['CCLNK_SRC_F']='' | ||
543 | 21 | v['CCLNK_TGT_F']=['-o',''] | ||
544 | 22 | v['LIB_ST']='-l%s' | ||
545 | 23 | v['LIBPATH_ST']='-L%s' | ||
546 | 24 | v['STATICLIB_ST']='-l%s' | ||
547 | 25 | v['STATICLIBPATH_ST']='-L%s' | ||
548 | 26 | v['RPATH_ST']='-Wl,-rpath,%s' | ||
549 | 27 | v['CCDEFINES_ST']='-D%s' | ||
550 | 28 | v['SONAME_ST']='-Wl,-h,%s' | ||
551 | 29 | v['SHLIB_MARKER']=[] | ||
552 | 30 | # v['STATICLIB_MARKER']='-Wl,-Bstatic' | ||
553 | 31 | v['FULLSTATIC_MARKER']='-static' | ||
554 | 32 | v['program_PATTERN']='%s' | ||
555 | 33 | v['shlib_CCFLAGS']=['-fPIC','-DPIC'] | ||
556 | 34 | v['shlib_LINKFLAGS']=['-shared'] | ||
557 | 35 | v['shlib_PATTERN']='lib%s.sl' | ||
558 | 36 | # v['staticlib_LINKFLAGS']=['-Wl,-Bstatic'] | ||
559 | 37 | v['staticlib_PATTERN']='lib%s.a' | ||
560 | 38 | |||
561 | 39 | gcc.gcc_modifier_hpux = gcc_modifier_hpux | ||
562 | 40 | |||
563 | 41 | from TaskGen import feature, after | ||
564 | 42 | @feature('cprogram', 'cshlib') | ||
565 | 43 | @after('apply_link', 'apply_lib_vars', 'apply_obj_vars') | ||
566 | 44 | def hpux_addfullpath(self): | ||
567 | 45 | if sys.platform == 'hp-ux11': | ||
568 | 46 | link = getattr(self, 'link_task', None) | ||
569 | 47 | if link: | ||
570 | 48 | lst = link.env.LINKFLAGS | ||
571 | 49 | buf = [] | ||
572 | 50 | for x in lst: | ||
573 | 51 | if x.startswith('-L'): | ||
574 | 52 | p2 = x[2:] | ||
575 | 53 | if not os.path.isabs(p2): | ||
576 | 54 | x = x[:2] + self.bld.srcnode.abspath(link.env) + "/../" + x[2:].lstrip('.') | ||
577 | 55 | buf.append(x) | ||
578 | 56 | link.env.LINKFLAGS = buf | ||
579 | diff --git a/buildtools/wafsamba/irixcc.py b/buildtools/wafsamba/irixcc.py | |||
580 | 57 | deleted file mode 100644 | 0 | deleted file mode 100644 |
581 | index f3cb451..0000000 | |||
582 | --- a/buildtools/wafsamba/irixcc.py | |||
583 | +++ /dev/null | |||
584 | @@ -1,79 +0,0 @@ | |||
585 | 1 | |||
586 | 2 | # compiler definition for irix/MIPSpro cc compiler | ||
587 | 3 | # based on suncc.py from waf | ||
588 | 4 | |||
589 | 5 | import os, optparse | ||
590 | 6 | import Utils, Options, Configure | ||
591 | 7 | import ccroot, ar | ||
592 | 8 | from Configure import conftest | ||
593 | 9 | |||
594 | 10 | from compiler_cc import c_compiler | ||
595 | 11 | |||
596 | 12 | c_compiler['irix'] = ['gcc', 'irixcc'] | ||
597 | 13 | |||
598 | 14 | @conftest | ||
599 | 15 | def find_irixcc(conf): | ||
600 | 16 | v = conf.env | ||
601 | 17 | cc = None | ||
602 | 18 | if v['CC']: cc = v['CC'] | ||
603 | 19 | elif 'CC' in conf.environ: cc = conf.environ['CC'] | ||
604 | 20 | if not cc: cc = conf.find_program('cc', var='CC') | ||
605 | 21 | if not cc: conf.fatal('irixcc was not found') | ||
606 | 22 | cc = conf.cmd_to_list(cc) | ||
607 | 23 | |||
608 | 24 | try: | ||
609 | 25 | if Utils.cmd_output(cc + ['-c99'] + ['-version']) != '': | ||
610 | 26 | conf.fatal('irixcc %r was not found' % cc) | ||
611 | 27 | except ValueError: | ||
612 | 28 | conf.fatal('irixcc -v could not be executed') | ||
613 | 29 | |||
614 | 30 | conf.env.append_unique('CCFLAGS', '-c99') | ||
615 | 31 | |||
616 | 32 | v['CC'] = cc | ||
617 | 33 | v['CC_NAME'] = 'irix' | ||
618 | 34 | |||
619 | 35 | @conftest | ||
620 | 36 | def irixcc_common_flags(conf): | ||
621 | 37 | v = conf.env | ||
622 | 38 | |||
623 | 39 | v['CC_SRC_F'] = '' | ||
624 | 40 | v['CC_TGT_F'] = ['-c', '-o', ''] | ||
625 | 41 | v['CPPPATH_ST'] = '-I%s' # template for adding include paths | ||
626 | 42 | |||
627 | 43 | # linker | ||
628 | 44 | if not v['LINK_CC']: v['LINK_CC'] = v['CC'] | ||
629 | 45 | v['CCLNK_SRC_F'] = '' | ||
630 | 46 | v['CCLNK_TGT_F'] = ['-o', ''] | ||
631 | 47 | |||
632 | 48 | v['LIB_ST'] = '-l%s' # template for adding libs | ||
633 | 49 | v['LIBPATH_ST'] = '-L%s' # template for adding libpaths | ||
634 | 50 | v['STATICLIB_ST'] = '-l%s' | ||
635 | 51 | v['STATICLIBPATH_ST'] = '-L%s' | ||
636 | 52 | v['CCDEFINES_ST'] = '-D%s' | ||
637 | 53 | |||
638 | 54 | # v['SONAME_ST'] = '-Wl,-h -Wl,%s' | ||
639 | 55 | # v['SHLIB_MARKER'] = '-Bdynamic' | ||
640 | 56 | # v['STATICLIB_MARKER'] = '-Bstatic' | ||
641 | 57 | |||
642 | 58 | # program | ||
643 | 59 | v['program_PATTERN'] = '%s' | ||
644 | 60 | |||
645 | 61 | # shared library | ||
646 | 62 | # v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] | ||
647 | 63 | # v['shlib_LINKFLAGS'] = ['-G'] | ||
648 | 64 | v['shlib_PATTERN'] = 'lib%s.so' | ||
649 | 65 | |||
650 | 66 | # static lib | ||
651 | 67 | # v['staticlib_LINKFLAGS'] = ['-Bstatic'] | ||
652 | 68 | # v['staticlib_PATTERN'] = 'lib%s.a' | ||
653 | 69 | |||
654 | 70 | detect = ''' | ||
655 | 71 | find_irixcc | ||
656 | 72 | find_cpp | ||
657 | 73 | find_ar | ||
658 | 74 | irixcc_common_flags | ||
659 | 75 | cc_load_tools | ||
660 | 76 | cc_add_flags | ||
661 | 77 | link_add_flags | ||
662 | 78 | ''' | ||
663 | 79 | |||
664 | diff --git a/buildtools/wafsamba/nothreads.py b/buildtools/wafsamba/nothreads.py | |||
665 | 80 | deleted file mode 100644 | 0 | deleted file mode 100644 |
666 | index 9bd33e8..0000000 | |||
667 | --- a/buildtools/wafsamba/nothreads.py | |||
668 | +++ /dev/null | |||
669 | @@ -1,219 +0,0 @@ | |||
670 | 1 | # encoding: utf-8 | ||
671 | 2 | # Thomas Nagy, 2005-2008 (ita) | ||
672 | 3 | |||
673 | 4 | # this replaces the core of Runner.py in waf with a varient that works | ||
674 | 5 | # on systems with completely broken threading (such as Python 2.5.x on | ||
675 | 6 | # AIX). For simplicity we enable this when JOBS=1, which is triggered | ||
676 | 7 | # by the compatibility makefile used for the waf build. That also ensures | ||
677 | 8 | # this code is tested, as it means it is used in the build farm, and by | ||
678 | 9 | # anyone using 'make' to build Samba with waf | ||
679 | 10 | |||
680 | 11 | "Execute the tasks" | ||
681 | 12 | |||
682 | 13 | import sys, random, threading | ||
683 | 14 | try: from Queue import Queue | ||
684 | 15 | except ImportError: from queue import Queue | ||
685 | 16 | import Utils, Options | ||
686 | 17 | from Constants import EXCEPTION, CRASHED, MAXJOBS, ASK_LATER, SKIPPED, SKIP_ME, SUCCESS | ||
687 | 18 | |||
688 | 19 | GAP = 15 | ||
689 | 20 | |||
690 | 21 | run_old = threading.Thread.run | ||
691 | 22 | def run(*args, **kwargs): | ||
692 | 23 | try: | ||
693 | 24 | run_old(*args, **kwargs) | ||
694 | 25 | except (KeyboardInterrupt, SystemExit): | ||
695 | 26 | raise | ||
696 | 27 | except: | ||
697 | 28 | sys.excepthook(*sys.exc_info()) | ||
698 | 29 | threading.Thread.run = run | ||
699 | 30 | |||
700 | 31 | |||
701 | 32 | class TaskConsumer(object): | ||
702 | 33 | consumers = 1 | ||
703 | 34 | |||
704 | 35 | def process(tsk): | ||
705 | 36 | m = tsk.master | ||
706 | 37 | if m.stop: | ||
707 | 38 | m.out.put(tsk) | ||
708 | 39 | return | ||
709 | 40 | |||
710 | 41 | try: | ||
711 | 42 | tsk.generator.bld.printout(tsk.display()) | ||
712 | 43 | if tsk.__class__.stat: ret = tsk.__class__.stat(tsk) | ||
713 | 44 | # actual call to task's run() function | ||
714 | 45 | else: ret = tsk.call_run() | ||
715 | 46 | except Exception as e: | ||
716 | 47 | tsk.err_msg = Utils.ex_stack() | ||
717 | 48 | tsk.hasrun = EXCEPTION | ||
718 | 49 | |||
719 | 50 | # TODO cleanup | ||
720 | 51 | m.error_handler(tsk) | ||
721 | 52 | m.out.put(tsk) | ||
722 | 53 | return | ||
723 | 54 | |||
724 | 55 | if ret: | ||
725 | 56 | tsk.err_code = ret | ||
726 | 57 | tsk.hasrun = CRASHED | ||
727 | 58 | else: | ||
728 | 59 | try: | ||
729 | 60 | tsk.post_run() | ||
730 | 61 | except Utils.WafError: | ||
731 | 62 | pass | ||
732 | 63 | except Exception: | ||
733 | 64 | tsk.err_msg = Utils.ex_stack() | ||
734 | 65 | tsk.hasrun = EXCEPTION | ||
735 | 66 | else: | ||
736 | 67 | tsk.hasrun = SUCCESS | ||
737 | 68 | if tsk.hasrun != SUCCESS: | ||
738 | 69 | m.error_handler(tsk) | ||
739 | 70 | |||
740 | 71 | m.out.put(tsk) | ||
741 | 72 | |||
742 | 73 | class Parallel(object): | ||
743 | 74 | """ | ||
744 | 75 | keep the consumer threads busy, and avoid consuming cpu cycles | ||
745 | 76 | when no more tasks can be added (end of the build, etc) | ||
746 | 77 | """ | ||
747 | 78 | def __init__(self, bld, j=2): | ||
748 | 79 | |||
749 | 80 | # number of consumers | ||
750 | 81 | self.numjobs = j | ||
751 | 82 | |||
752 | 83 | self.manager = bld.task_manager | ||
753 | 84 | self.manager.current_group = 0 | ||
754 | 85 | |||
755 | 86 | self.total = self.manager.total() | ||
756 | 87 | |||
757 | 88 | # tasks waiting to be processed - IMPORTANT | ||
758 | 89 | self.outstanding = [] | ||
759 | 90 | self.maxjobs = MAXJOBS | ||
760 | 91 | |||
761 | 92 | # tasks that are awaiting for another task to complete | ||
762 | 93 | self.frozen = [] | ||
763 | 94 | |||
764 | 95 | # tasks returned by the consumers | ||
765 | 96 | self.out = Queue(0) | ||
766 | 97 | |||
767 | 98 | self.count = 0 # tasks not in the producer area | ||
768 | 99 | |||
769 | 100 | self.processed = 1 # progress indicator | ||
770 | 101 | |||
771 | 102 | self.stop = False # error condition to stop the build | ||
772 | 103 | self.error = False # error flag | ||
773 | 104 | |||
774 | 105 | def get_next(self): | ||
775 | 106 | "override this method to schedule the tasks in a particular order" | ||
776 | 107 | if not self.outstanding: | ||
777 | 108 | return None | ||
778 | 109 | return self.outstanding.pop(0) | ||
779 | 110 | |||
780 | 111 | def postpone(self, tsk): | ||
781 | 112 | "override this method to schedule the tasks in a particular order" | ||
782 | 113 | # TODO consider using a deque instead | ||
783 | 114 | if random.randint(0, 1): | ||
784 | 115 | self.frozen.insert(0, tsk) | ||
785 | 116 | else: | ||
786 | 117 | self.frozen.append(tsk) | ||
787 | 118 | |||
788 | 119 | def refill_task_list(self): | ||
789 | 120 | "called to set the next group of tasks" | ||
790 | 121 | |||
791 | 122 | while self.count > self.numjobs + GAP or self.count >= self.maxjobs: | ||
792 | 123 | self.get_out() | ||
793 | 124 | |||
794 | 125 | while not self.outstanding: | ||
795 | 126 | if self.count: | ||
796 | 127 | self.get_out() | ||
797 | 128 | |||
798 | 129 | if self.frozen: | ||
799 | 130 | self.outstanding += self.frozen | ||
800 | 131 | self.frozen = [] | ||
801 | 132 | elif not self.count: | ||
802 | 133 | (jobs, tmp) = self.manager.get_next_set() | ||
803 | 134 | if jobs is not None: | ||
804 | 135 | self.maxjobs = jobs | ||
805 | 136 | if tmp: | ||
806 | 137 | self.outstanding += tmp | ||
807 | 138 | break | ||
808 | 139 | |||
809 | 140 | def get_out(self): | ||
810 | 141 | "the tasks that are put to execute are all collected using get_out" | ||
811 | 142 | ret = self.out.get() | ||
812 | 143 | self.manager.add_finished(ret) | ||
813 | 144 | if not self.stop and getattr(ret, 'more_tasks', None): | ||
814 | 145 | self.outstanding += ret.more_tasks | ||
815 | 146 | self.total += len(ret.more_tasks) | ||
816 | 147 | self.count -= 1 | ||
817 | 148 | |||
818 | 149 | def error_handler(self, tsk): | ||
819 | 150 | "by default, errors make the build stop (not thread safe so be careful)" | ||
820 | 151 | if not Options.options.keep: | ||
821 | 152 | self.stop = True | ||
822 | 153 | self.error = True | ||
823 | 154 | |||
824 | 155 | def start(self): | ||
825 | 156 | "execute the tasks" | ||
826 | 157 | |||
827 | 158 | while not self.stop: | ||
828 | 159 | |||
829 | 160 | self.refill_task_list() | ||
830 | 161 | |||
831 | 162 | # consider the next task | ||
832 | 163 | tsk = self.get_next() | ||
833 | 164 | if not tsk: | ||
834 | 165 | if self.count: | ||
835 | 166 | # tasks may add new ones after they are run | ||
836 | 167 | continue | ||
837 | 168 | else: | ||
838 | 169 | # no tasks to run, no tasks running, time to exit | ||
839 | 170 | break | ||
840 | 171 | |||
841 | 172 | if tsk.hasrun: | ||
842 | 173 | # if the task is marked as "run", just skip it | ||
843 | 174 | self.processed += 1 | ||
844 | 175 | self.manager.add_finished(tsk) | ||
845 | 176 | continue | ||
846 | 177 | |||
847 | 178 | try: | ||
848 | 179 | st = tsk.runnable_status() | ||
849 | 180 | except Exception as e: | ||
850 | 181 | self.processed += 1 | ||
851 | 182 | if self.stop and not Options.options.keep: | ||
852 | 183 | tsk.hasrun = SKIPPED | ||
853 | 184 | self.manager.add_finished(tsk) | ||
854 | 185 | continue | ||
855 | 186 | self.error_handler(tsk) | ||
856 | 187 | self.manager.add_finished(tsk) | ||
857 | 188 | tsk.hasrun = EXCEPTION | ||
858 | 189 | tsk.err_msg = Utils.ex_stack() | ||
859 | 190 | continue | ||
860 | 191 | |||
861 | 192 | if st == ASK_LATER: | ||
862 | 193 | self.postpone(tsk) | ||
863 | 194 | elif st == SKIP_ME: | ||
864 | 195 | self.processed += 1 | ||
865 | 196 | tsk.hasrun = SKIPPED | ||
866 | 197 | self.manager.add_finished(tsk) | ||
867 | 198 | else: | ||
868 | 199 | # run me: put the task in ready queue | ||
869 | 200 | tsk.position = (self.processed, self.total) | ||
870 | 201 | self.count += 1 | ||
871 | 202 | self.processed += 1 | ||
872 | 203 | tsk.master = self | ||
873 | 204 | |||
874 | 205 | process(tsk) | ||
875 | 206 | |||
876 | 207 | # self.count represents the tasks that have been made available to the consumer threads | ||
877 | 208 | # collect all the tasks after an error else the message may be incomplete | ||
878 | 209 | while self.error and self.count: | ||
879 | 210 | self.get_out() | ||
880 | 211 | |||
881 | 212 | #print loop | ||
882 | 213 | assert (self.count == 0 or self.stop) | ||
883 | 214 | |||
884 | 215 | |||
885 | 216 | # enable nothreads | ||
886 | 217 | import Runner | ||
887 | 218 | Runner.process = process | ||
888 | 219 | Runner.Parallel = Parallel | ||
889 | diff --git a/buildtools/wafsamba/pkgconfig.py b/buildtools/wafsamba/pkgconfig.py | |||
890 | index 999bad4..b83d5f3 100644 | |||
891 | --- a/buildtools/wafsamba/pkgconfig.py | |||
892 | +++ b/buildtools/wafsamba/pkgconfig.py | |||
893 | @@ -1,7 +1,7 @@ | |||
894 | 1 | # handle substitution of variables in pc files | 1 | # handle substitution of variables in pc files |
895 | 2 | 2 | ||
896 | 3 | import os, re, sys | 3 | import os, re, sys |
898 | 4 | import Build, Logs | 4 | from waflib import Build, Logs |
899 | 5 | from samba_utils import SUBST_VARS_RECURSIVE, TO_LIST | 5 | from samba_utils import SUBST_VARS_RECURSIVE, TO_LIST |
900 | 6 | 6 | ||
901 | 7 | def subst_at_vars(task): | 7 | def subst_at_vars(task): |
902 | @@ -52,7 +52,7 @@ def PKG_CONFIG_FILES(bld, pc_files, vnum=None, extra_name=None): | |||
903 | 52 | rule=subst_at_vars, | 52 | rule=subst_at_vars, |
904 | 53 | source=f+'.in', | 53 | source=f+'.in', |
905 | 54 | target=target) | 54 | target=target) |
907 | 55 | bld.add_manual_dependency(bld.path.find_or_declare(f), bld.env['PREFIX']) | 55 | bld.add_manual_dependency(bld.path.find_or_declare(f), bld.env['PREFIX'].encode('utf8')) |
908 | 56 | t.vars = [] | 56 | t.vars = [] |
909 | 57 | if t.env.RPATH_ON_INSTALL: | 57 | if t.env.RPATH_ON_INSTALL: |
910 | 58 | t.env.LIB_RPATH = t.env.RPATH_ST % t.env.LIBDIR | 58 | t.env.LIB_RPATH = t.env.RPATH_ST % t.env.LIBDIR |
911 | diff --git a/buildtools/wafsamba/samba3.py b/buildtools/wafsamba/samba3.py | |||
912 | index 44daff9..5aab250 100644 | |||
913 | --- a/buildtools/wafsamba/samba3.py | |||
914 | +++ b/buildtools/wafsamba/samba3.py | |||
915 | @@ -1,12 +1,11 @@ | |||
916 | 1 | # a waf tool to add autoconf-like macros to the configure section | 1 | # a waf tool to add autoconf-like macros to the configure section |
917 | 2 | # and for SAMBA_ macros for building libraries, binaries etc | 2 | # and for SAMBA_ macros for building libraries, binaries etc |
918 | 3 | 3 | ||
921 | 4 | import Options, Build, os | 4 | import os |
922 | 5 | from samba_utils import os_path_relpath, TO_LIST, samba_add_onoff_option | 5 | from waflib import Build |
923 | 6 | from samba_utils import os_path_relpath, TO_LIST | ||
924 | 6 | from samba_autoconf import library_flags | 7 | from samba_autoconf import library_flags |
925 | 7 | 8 | ||
926 | 8 | Options.Handler.SAMBA3_ADD_OPTION = samba_add_onoff_option | ||
927 | 9 | |||
928 | 10 | def SAMBA3_IS_STATIC_MODULE(bld, module): | 9 | def SAMBA3_IS_STATIC_MODULE(bld, module): |
929 | 11 | '''Check whether module is in static list''' | 10 | '''Check whether module is in static list''' |
930 | 12 | if module in bld.env['static_modules']: | 11 | if module in bld.env['static_modules']: |
931 | @@ -32,7 +31,7 @@ def s3_fix_kwargs(bld, kwargs): | |||
932 | 32 | '''fix the build arguments for s3 build rules to include the | 31 | '''fix the build arguments for s3 build rules to include the |
933 | 33 | necessary includes, subdir and cflags options ''' | 32 | necessary includes, subdir and cflags options ''' |
934 | 34 | s3dir = os.path.join(bld.env.srcdir, 'source3') | 33 | s3dir = os.path.join(bld.env.srcdir, 'source3') |
936 | 35 | s3reldir = os_path_relpath(s3dir, bld.curdir) | 34 | s3reldir = os_path_relpath(s3dir, bld.path.abspath()) |
937 | 36 | 35 | ||
938 | 37 | # the extra_includes list is relative to the source3 directory | 36 | # the extra_includes list is relative to the source3 directory |
939 | 38 | extra_includes = [ '.', 'include', 'lib' ] | 37 | extra_includes = [ '.', 'include', 'lib' ] |
940 | diff --git a/buildtools/wafsamba/samba_abi.py b/buildtools/wafsamba/samba_abi.py | |||
941 | index 196b468..5e7686d 100644 | |||
942 | --- a/buildtools/wafsamba/samba_abi.py | |||
943 | +++ b/buildtools/wafsamba/samba_abi.py | |||
944 | @@ -1,7 +1,13 @@ | |||
945 | 1 | # functions for handling ABI checking of libraries | 1 | # functions for handling ABI checking of libraries |
946 | 2 | 2 | ||
949 | 3 | import Options, Utils, os, Logs, samba_utils, sys, Task, fnmatch, re, Build | 3 | import os |
950 | 4 | from TaskGen import feature, before, after | 4 | import sys |
951 | 5 | import re | ||
952 | 6 | import fnmatch | ||
953 | 7 | |||
954 | 8 | from waflib import Options, Utils, Logs, Task, Build, Errors | ||
955 | 9 | from waflib.TaskGen import feature, before, after | ||
956 | 10 | from wafsamba import samba_utils | ||
957 | 5 | 11 | ||
958 | 6 | # these type maps cope with platform specific names for common types | 12 | # these type maps cope with platform specific names for common types |
959 | 7 | # please add new type mappings into the list below | 13 | # please add new type mappings into the list below |
960 | @@ -10,7 +16,7 @@ abi_type_maps = { | |||
961 | 10 | 'struct __va_list_tag *' : 'va_list' | 16 | 'struct __va_list_tag *' : 'va_list' |
962 | 11 | } | 17 | } |
963 | 12 | 18 | ||
965 | 13 | version_key = lambda x: map(int, x.split(".")) | 19 | version_key = lambda x: list(map(int, x.split("."))) |
966 | 14 | 20 | ||
967 | 15 | def normalise_signature(sig): | 21 | def normalise_signature(sig): |
968 | 16 | '''normalise a signature from gdb''' | 22 | '''normalise a signature from gdb''' |
969 | @@ -79,7 +85,7 @@ def abi_check_task(self): | |||
970 | 79 | libpath = self.inputs[0].abspath(self.env) | 85 | libpath = self.inputs[0].abspath(self.env) |
971 | 80 | libname = os.path.basename(libpath) | 86 | libname = os.path.basename(libpath) |
972 | 81 | 87 | ||
974 | 82 | sigs = Utils.cmd_output([abi_gen, libpath]) | 88 | sigs = samba_utils.get_string(Utils.cmd_output([abi_gen, libpath])) |
975 | 83 | parsed_sigs = parse_sigs(sigs, self.ABI_MATCH) | 89 | parsed_sigs = parse_sigs(sigs, self.ABI_MATCH) |
976 | 84 | 90 | ||
977 | 85 | sig_file = self.ABI_FILE | 91 | sig_file = self.ABI_FILE |
978 | @@ -87,7 +93,7 @@ def abi_check_task(self): | |||
979 | 87 | old_sigs = samba_utils.load_file(sig_file) | 93 | old_sigs = samba_utils.load_file(sig_file) |
980 | 88 | if old_sigs is None or Options.options.ABI_UPDATE: | 94 | if old_sigs is None or Options.options.ABI_UPDATE: |
981 | 89 | if not save_sigs(sig_file, parsed_sigs): | 95 | if not save_sigs(sig_file, parsed_sigs): |
983 | 90 | raise Utils.WafError('Failed to save ABI file "%s"' % sig_file) | 96 | raise Errors.WafError('Failed to save ABI file "%s"' % sig_file) |
984 | 91 | Logs.warn('Generated ABI signatures %s' % sig_file) | 97 | Logs.warn('Generated ABI signatures %s' % sig_file) |
985 | 92 | return | 98 | return |
986 | 93 | 99 | ||
987 | @@ -112,14 +118,14 @@ def abi_check_task(self): | |||
988 | 112 | got_error = True | 118 | got_error = True |
989 | 113 | 119 | ||
990 | 114 | if got_error: | 120 | if got_error: |
992 | 115 | raise Utils.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname) | 121 | raise Errors.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname) |
993 | 116 | 122 | ||
994 | 117 | 123 | ||
996 | 118 | t = Task.task_type_from_func('abi_check', abi_check_task, color='BLUE', ext_in='.bin') | 124 | t = Task.task_factory('abi_check', abi_check_task, color='BLUE', ext_in='.bin') |
997 | 119 | t.quiet = True | 125 | t.quiet = True |
998 | 120 | # allow "waf --abi-check" to force re-checking the ABI | 126 | # allow "waf --abi-check" to force re-checking the ABI |
999 | 121 | if '--abi-check' in sys.argv: | 127 | if '--abi-check' in sys.argv: |
1001 | 122 | Task.always_run(t) | 128 | t.always_run = True |
1002 | 123 | 129 | ||
1003 | 124 | @after('apply_link') | 130 | @after('apply_link') |
1004 | 125 | @feature('abi_check') | 131 | @feature('abi_check') |
1005 | @@ -184,18 +190,20 @@ def abi_write_vscript(f, libname, current_version, versions, symmap, abi_match): | |||
1006 | 184 | f.write("}%s;\n\n" % last_key) | 190 | f.write("}%s;\n\n" % last_key) |
1007 | 185 | last_key = " %s" % symver | 191 | last_key = " %s" % symver |
1008 | 186 | f.write("%s {\n" % current_version) | 192 | f.write("%s {\n" % current_version) |
1011 | 187 | local_abi = filter(lambda x: x[0] == '!', abi_match) | 193 | local_abi = list(filter(lambda x: x[0] == '!', abi_match)) |
1012 | 188 | global_abi = filter(lambda x: x[0] != '!', abi_match) | 194 | global_abi = list(filter(lambda x: x[0] != '!', abi_match)) |
1013 | 189 | f.write("\tglobal:\n") | 195 | f.write("\tglobal:\n") |
1014 | 190 | if len(global_abi) > 0: | 196 | if len(global_abi) > 0: |
1015 | 191 | for x in global_abi: | 197 | for x in global_abi: |
1016 | 192 | f.write("\t\t%s;\n" % x) | 198 | f.write("\t\t%s;\n" % x) |
1017 | 193 | else: | 199 | else: |
1018 | 194 | f.write("\t\t*;\n") | 200 | f.write("\t\t*;\n") |
1023 | 195 | if abi_match != ["*"]: | 201 | # Always hide symbols that must be local if exist |
1024 | 196 | f.write("\tlocal:\n") | 202 | local_abi.extend(["!_end", "!__bss_start", "!_edata"]) |
1025 | 197 | for x in local_abi: | 203 | f.write("\tlocal:\n") |
1026 | 198 | f.write("\t\t%s;\n" % x[1:]) | 204 | for x in local_abi: |
1027 | 205 | f.write("\t\t%s;\n" % x[1:]) | ||
1028 | 206 | if global_abi != ["*"]: | ||
1029 | 199 | if len(global_abi) > 0: | 207 | if len(global_abi) > 0: |
1030 | 200 | f.write("\t\t*;\n") | 208 | f.write("\t\t*;\n") |
1031 | 201 | f.write("};\n") | 209 | f.write("};\n") |
1032 | diff --git a/buildtools/wafsamba/samba_autoconf.py b/buildtools/wafsamba/samba_autoconf.py | |||
1033 | index c4391d0..ee1fc23 100644 | |||
1034 | --- a/buildtools/wafsamba/samba_autoconf.py | |||
1035 | +++ b/buildtools/wafsamba/samba_autoconf.py | |||
1036 | @@ -1,9 +1,10 @@ | |||
1037 | 1 | # a waf tool to add autoconf-like macros to the configure section | 1 | # a waf tool to add autoconf-like macros to the configure section |
1038 | 2 | 2 | ||
1039 | 3 | import os, sys | 3 | import os, sys |
1043 | 4 | import Build, Options, preproc, Logs | 4 | from waflib import Build, Options, Logs, Context |
1044 | 5 | from Configure import conf | 5 | from waflib.Configure import conf |
1045 | 6 | from TaskGen import feature | 6 | from waflib.TaskGen import feature |
1046 | 7 | from waflib.Tools import c_preproc as preproc | ||
1047 | 7 | from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p | 8 | from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p |
1048 | 8 | 9 | ||
1049 | 9 | missing_headers = set() | 10 | missing_headers = set() |
1050 | @@ -18,7 +19,7 @@ def DEFINE(conf, d, v, add_to_cflags=False, quote=False): | |||
1051 | 18 | '''define a config option''' | 19 | '''define a config option''' |
1052 | 19 | conf.define(d, v, quote=quote) | 20 | conf.define(d, v, quote=quote) |
1053 | 20 | if add_to_cflags: | 21 | if add_to_cflags: |
1055 | 21 | conf.env.append_value('CCDEFINES', d + '=' + str(v)) | 22 | conf.env.append_value('CFLAGS', '-D%s=%s' % (d, str(v))) |
1056 | 22 | 23 | ||
1057 | 23 | def hlist_to_string(conf, headers=None): | 24 | def hlist_to_string(conf, headers=None): |
1058 | 24 | '''convert a headers list to a set of #include lines''' | 25 | '''convert a headers list to a set of #include lines''' |
1059 | @@ -44,11 +45,11 @@ def COMPOUND_START(conf, msg): | |||
1060 | 44 | if v != [] and v != 0: | 45 | if v != [] and v != 0: |
1061 | 45 | conf.env.in_compound = v + 1 | 46 | conf.env.in_compound = v + 1 |
1062 | 46 | return | 47 | return |
1068 | 47 | conf.check_message_1(msg) | 48 | conf.start_msg(msg) |
1069 | 48 | conf.saved_check_message_1 = conf.check_message_1 | 49 | conf.saved_check_message_1 = conf.start_msg |
1070 | 49 | conf.check_message_1 = null_check_message_1 | 50 | conf.start_msg = null_check_message_1 |
1071 | 50 | conf.saved_check_message_2 = conf.check_message_2 | 51 | conf.saved_check_message_2 = conf.end_msg |
1072 | 51 | conf.check_message_2 = null_check_message_2 | 52 | conf.end_msg = null_check_message_2 |
1073 | 52 | conf.env.in_compound = 1 | 53 | conf.env.in_compound = 1 |
1074 | 53 | 54 | ||
1075 | 54 | 55 | ||
1076 | @@ -58,9 +59,9 @@ def COMPOUND_END(conf, result): | |||
1077 | 58 | conf.env.in_compound -= 1 | 59 | conf.env.in_compound -= 1 |
1078 | 59 | if conf.env.in_compound != 0: | 60 | if conf.env.in_compound != 0: |
1079 | 60 | return | 61 | return |
1083 | 61 | conf.check_message_1 = conf.saved_check_message_1 | 62 | conf.start_msg = conf.saved_check_message_1 |
1084 | 62 | conf.check_message_2 = conf.saved_check_message_2 | 63 | conf.end_msg = conf.saved_check_message_2 |
1085 | 63 | p = conf.check_message_2 | 64 | p = conf.end_msg |
1086 | 64 | if result is True: | 65 | if result is True: |
1087 | 65 | p('ok') | 66 | p('ok') |
1088 | 66 | elif not result: | 67 | elif not result: |
1089 | @@ -96,10 +97,10 @@ def CHECK_HEADER(conf, h, add_headers=False, lib=None): | |||
1090 | 96 | hdrs = hlist_to_string(conf, headers=h) | 97 | hdrs = hlist_to_string(conf, headers=h) |
1091 | 97 | if lib is None: | 98 | if lib is None: |
1092 | 98 | lib = "" | 99 | lib = "" |
1094 | 99 | ret = conf.check(fragment='%s\nint main(void) { return 0; }' % hdrs, | 100 | ret = conf.check(fragment='%s\nint main(void) { return 0; }\n' % hdrs, |
1095 | 100 | type='nolink', | 101 | type='nolink', |
1096 | 101 | execute=0, | 102 | execute=0, |
1098 | 102 | ccflags=ccflags, | 103 | cflags=ccflags, |
1099 | 103 | mandatory=False, | 104 | mandatory=False, |
1100 | 104 | includes=cpppath, | 105 | includes=cpppath, |
1101 | 105 | uselib=lib.upper(), | 106 | uselib=lib.upper(), |
1102 | @@ -250,7 +251,10 @@ def CHECK_FUNC(conf, f, link=True, lib=None, headers=None): | |||
1103 | 250 | 251 | ||
1104 | 251 | ret = False | 252 | ret = False |
1105 | 252 | 253 | ||
1107 | 253 | conf.COMPOUND_START('Checking for %s' % f) | 254 | in_lib_str = "" |
1108 | 255 | if lib: | ||
1109 | 256 | in_lib_str = " in %s" % lib | ||
1110 | 257 | conf.COMPOUND_START('Checking for %s%s' % (f, in_lib_str)) | ||
1111 | 254 | 258 | ||
1112 | 255 | if link is None or link: | 259 | if link is None or link: |
1113 | 256 | ret = CHECK_CODE(conf, | 260 | ret = CHECK_CODE(conf, |
1114 | @@ -322,7 +326,7 @@ def CHECK_SIZEOF(conf, vars, headers=None, define=None, critical=True): | |||
1115 | 322 | ret = False | 326 | ret = False |
1116 | 323 | if v_define is None: | 327 | if v_define is None: |
1117 | 324 | v_define = 'SIZEOF_%s' % v.upper().replace(' ', '_') | 328 | v_define = 'SIZEOF_%s' % v.upper().replace(' ', '_') |
1119 | 325 | for size in list((1, 2, 4, 8, 16, 32)): | 329 | for size in list((1, 2, 4, 8, 16, 32, 64)): |
1120 | 326 | if CHECK_CODE(conf, | 330 | if CHECK_CODE(conf, |
1121 | 327 | 'static int test_array[1 - 2 * !(((long int)(sizeof(%s))) <= %d)];' % (v, size), | 331 | 'static int test_array[1 - 2 * !(((long int)(sizeof(%s))) <= %d)];' % (v, size), |
1122 | 328 | define=v_define, | 332 | define=v_define, |
1123 | @@ -383,12 +387,10 @@ def CHECK_CODE(conf, code, define, | |||
1124 | 383 | else: | 387 | else: |
1125 | 384 | execute = 0 | 388 | execute = 0 |
1126 | 385 | 389 | ||
1127 | 386 | defs = conf.get_config_header() | ||
1128 | 387 | |||
1129 | 388 | if addmain: | 390 | if addmain: |
1131 | 389 | fragment='%s\n%s\n int main(void) { %s; return 0; }\n' % (defs, hdrs, code) | 391 | fragment='%s\n int main(void) { %s; return 0; }\n' % (hdrs, code) |
1132 | 390 | else: | 392 | else: |
1134 | 391 | fragment='%s\n%s\n%s\n' % (defs, hdrs, code) | 393 | fragment='%s\n%s\n' % (hdrs, code) |
1135 | 392 | 394 | ||
1136 | 393 | if msg is None: | 395 | if msg is None: |
1137 | 394 | msg="Checking for %s" % define | 396 | msg="Checking for %s" % define |
1138 | @@ -398,15 +400,11 @@ def CHECK_CODE(conf, code, define, | |||
1139 | 398 | # Be strict when relying on a compiler check | 400 | # Be strict when relying on a compiler check |
1140 | 399 | # Some compilers (e.g. xlc) ignore non-supported features as warnings | 401 | # Some compilers (e.g. xlc) ignore non-supported features as warnings |
1141 | 400 | if strict: | 402 | if strict: |
1148 | 401 | extra_cflags = None | 403 | if 'WERROR_CFLAGS' in conf.env: |
1149 | 402 | if conf.env["CC_NAME"] == "gcc": | 404 | cflags.extend(conf.env['WERROR_CFLAGS']) |
1144 | 403 | extra_cflags = "-Werror" | ||
1145 | 404 | elif conf.env["CC_NAME"] == "xlc": | ||
1146 | 405 | extra_cflags = "-qhalt=w" | ||
1147 | 406 | cflags.append(extra_cflags) | ||
1150 | 407 | 405 | ||
1151 | 408 | if local_include: | 406 | if local_include: |
1153 | 409 | cflags.append('-I%s' % conf.curdir) | 407 | cflags.append('-I%s' % conf.path.abspath()) |
1154 | 410 | 408 | ||
1155 | 411 | if not link: | 409 | if not link: |
1156 | 412 | type='nolink' | 410 | type='nolink' |
1157 | @@ -431,11 +429,11 @@ def CHECK_CODE(conf, code, define, | |||
1158 | 431 | 429 | ||
1159 | 432 | conf.COMPOUND_START(msg) | 430 | conf.COMPOUND_START(msg) |
1160 | 433 | 431 | ||
1162 | 434 | ret = conf.check(fragment=fragment, | 432 | try: |
1163 | 433 | ret = conf.check(fragment=fragment, | ||
1164 | 435 | execute=execute, | 434 | execute=execute, |
1165 | 436 | define_name = define, | 435 | define_name = define, |
1168 | 437 | mandatory = mandatory, | 436 | cflags=cflags, |
1167 | 438 | ccflags=cflags, | ||
1169 | 439 | ldflags=ldflags, | 437 | ldflags=ldflags, |
1170 | 440 | includes=includes, | 438 | includes=includes, |
1171 | 441 | uselib=uselib, | 439 | uselib=uselib, |
1172 | @@ -444,22 +442,30 @@ def CHECK_CODE(conf, code, define, | |||
1173 | 444 | quote=quote, | 442 | quote=quote, |
1174 | 445 | exec_args=exec_args, | 443 | exec_args=exec_args, |
1175 | 446 | define_ret=define_ret) | 444 | define_ret=define_ret) |
1181 | 447 | if not ret and CONFIG_SET(conf, define): | 445 | except Exception: |
1182 | 448 | # sometimes conf.check() returns false, but it | 446 | if always: |
1183 | 449 | # sets the define. Maybe a waf bug? | 447 | conf.DEFINE(define, 0) |
1184 | 450 | ret = True | 448 | else: |
1185 | 451 | if ret: | 449 | conf.undefine(define) |
1186 | 450 | conf.COMPOUND_END(False) | ||
1187 | 451 | if mandatory: | ||
1188 | 452 | raise | ||
1189 | 453 | return False | ||
1190 | 454 | else: | ||
1191 | 455 | # Success is indicated by ret but we should unset | ||
1192 | 456 | # defines set by WAF's c_config.check() because it | ||
1193 | 457 | # defines it to int(ret) and we want to undefine it | ||
1194 | 458 | if not ret: | ||
1195 | 459 | conf.undefine(define) | ||
1196 | 460 | conf.COMPOUND_END(False) | ||
1197 | 461 | return False | ||
1198 | 452 | if not define_ret: | 462 | if not define_ret: |
1199 | 453 | conf.DEFINE(define, 1) | 463 | conf.DEFINE(define, 1) |
1200 | 454 | conf.COMPOUND_END(True) | 464 | conf.COMPOUND_END(True) |
1201 | 455 | else: | 465 | else: |
1203 | 456 | conf.COMPOUND_END(conf.env[define]) | 466 | conf.DEFINE(define, ret, quote=quote) |
1204 | 467 | conf.COMPOUND_END(ret) | ||
1205 | 457 | return True | 468 | return True |
1206 | 458 | if always: | ||
1207 | 459 | conf.DEFINE(define, 0) | ||
1208 | 460 | conf.COMPOUND_END(False) | ||
1209 | 461 | return False | ||
1210 | 462 | |||
1211 | 463 | 469 | ||
1212 | 464 | 470 | ||
1213 | 465 | @conf | 471 | @conf |
1214 | @@ -490,8 +496,9 @@ def CHECK_CFLAGS(conf, cflags, fragment='int main(void) { return 0; }\n'): | |||
1215 | 490 | check_cflags.extend(conf.env['WERROR_CFLAGS']) | 496 | check_cflags.extend(conf.env['WERROR_CFLAGS']) |
1216 | 491 | return conf.check(fragment=fragment, | 497 | return conf.check(fragment=fragment, |
1217 | 492 | execute=0, | 498 | execute=0, |
1218 | 499 | mandatory=False, | ||
1219 | 493 | type='nolink', | 500 | type='nolink', |
1221 | 494 | ccflags=check_cflags, | 501 | cflags=check_cflags, |
1222 | 495 | msg="Checking compiler accepts %s" % cflags) | 502 | msg="Checking compiler accepts %s" % cflags) |
1223 | 496 | 503 | ||
1224 | 497 | @conf | 504 | @conf |
1225 | @@ -547,12 +554,15 @@ def library_flags(self, libs): | |||
1226 | 547 | # note that we do not add the -I and -L in here, as that is added by the waf | 554 | # note that we do not add the -I and -L in here, as that is added by the waf |
1227 | 548 | # core. Adding it here would just change the order that it is put on the link line | 555 | # core. Adding it here would just change the order that it is put on the link line |
1228 | 549 | # which can cause system paths to be added before internal libraries | 556 | # which can cause system paths to be added before internal libraries |
1230 | 550 | extra_ccflags = TO_LIST(getattr(self.env, 'CCFLAGS_%s' % lib.upper(), [])) | 557 | extra_ccflags = TO_LIST(getattr(self.env, 'CFLAGS_%s' % lib.upper(), [])) |
1231 | 551 | extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), [])) | 558 | extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), [])) |
1232 | 552 | extra_cpppath = TO_LIST(getattr(self.env, 'CPPPATH_%s' % lib.upper(), [])) | 559 | extra_cpppath = TO_LIST(getattr(self.env, 'CPPPATH_%s' % lib.upper(), [])) |
1233 | 553 | ccflags.extend(extra_ccflags) | 560 | ccflags.extend(extra_ccflags) |
1234 | 554 | ldflags.extend(extra_ldflags) | 561 | ldflags.extend(extra_ldflags) |
1235 | 555 | cpppath.extend(extra_cpppath) | 562 | cpppath.extend(extra_cpppath) |
1236 | 563 | |||
1237 | 564 | extra_cpppath = TO_LIST(getattr(self.env, 'INCLUDES_%s' % lib.upper(), [])) | ||
1238 | 565 | cpppath.extend(extra_cpppath) | ||
1239 | 556 | if 'EXTRA_LDFLAGS' in self.env: | 566 | if 'EXTRA_LDFLAGS' in self.env: |
1240 | 557 | ldflags.extend(self.env['EXTRA_LDFLAGS']) | 567 | ldflags.extend(self.env['EXTRA_LDFLAGS']) |
1241 | 558 | 568 | ||
1242 | @@ -585,9 +595,9 @@ int foo() | |||
1243 | 585 | 595 | ||
1244 | 586 | (ccflags, ldflags, cpppath) = library_flags(conf, lib) | 596 | (ccflags, ldflags, cpppath) = library_flags(conf, lib) |
1245 | 587 | if shlib: | 597 | if shlib: |
1247 | 588 | res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False) | 598 | res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False) |
1248 | 589 | else: | 599 | else: |
1250 | 590 | res = conf.check(lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False) | 600 | res = conf.check(lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False) |
1251 | 591 | 601 | ||
1252 | 592 | if not res: | 602 | if not res: |
1253 | 593 | if mandatory: | 603 | if mandatory: |
1254 | @@ -661,8 +671,8 @@ def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False, | |||
1255 | 661 | @conf | 671 | @conf |
1256 | 662 | def IN_LAUNCH_DIR(conf): | 672 | def IN_LAUNCH_DIR(conf): |
1257 | 663 | '''return True if this rule is being run from the launch directory''' | 673 | '''return True if this rule is being run from the launch directory''' |
1260 | 664 | return os.path.realpath(conf.curdir) == os.path.realpath(Options.launch_dir) | 674 | return os.path.realpath(conf.path.abspath()) == os.path.realpath(Context.launch_dir) |
1261 | 665 | Options.Handler.IN_LAUNCH_DIR = IN_LAUNCH_DIR | 675 | Options.OptionsContext.IN_LAUNCH_DIR = IN_LAUNCH_DIR |
1262 | 666 | 676 | ||
1263 | 667 | 677 | ||
1264 | 668 | @conf | 678 | @conf |
1265 | @@ -674,23 +684,42 @@ def SAMBA_CONFIG_H(conf, path=None): | |||
1266 | 674 | return | 684 | return |
1267 | 675 | 685 | ||
1268 | 676 | # we need to build real code that can't be optimized away to test | 686 | # we need to build real code that can't be optimized away to test |
1286 | 677 | if conf.check(fragment=''' | 687 | stack_protect_list = ['-fstack-protector-strong', '-fstack-protector'] |
1287 | 678 | #include <stdio.h> | 688 | for stack_protect_flag in stack_protect_list: |
1288 | 679 | 689 | flag_supported = conf.check(fragment=''' | |
1289 | 680 | int main(void) | 690 | #include <stdio.h> |
1290 | 681 | { | 691 | |
1291 | 682 | char t[100000]; | 692 | int main(void) |
1292 | 683 | while (fgets(t, sizeof(t), stdin)); | 693 | { |
1293 | 684 | return 0; | 694 | char t[100000]; |
1294 | 685 | } | 695 | while (fgets(t, sizeof(t), stdin)); |
1295 | 686 | ''', | 696 | return 0; |
1296 | 687 | execute=0, | 697 | } |
1297 | 688 | ccflags='-fstack-protector', | 698 | ''', |
1298 | 689 | ldflags='-fstack-protector', | 699 | execute=0, |
1299 | 690 | mandatory=False, | 700 | cflags=[ '-Werror', '-Wp,-D_FORTIFY_SOURCE=2', stack_protect_flag], |
1300 | 691 | msg='Checking if toolchain accepts -fstack-protector'): | 701 | mandatory=False, |
1301 | 692 | conf.ADD_CFLAGS('-fstack-protector') | 702 | msg='Checking if compiler accepts %s' % (stack_protect_flag)) |
1302 | 693 | conf.ADD_LDFLAGS('-fstack-protector') | 703 | if flag_supported: |
1303 | 704 | conf.ADD_CFLAGS('%s' % (stack_protect_flag)) | ||
1304 | 705 | break | ||
1305 | 706 | |||
1306 | 707 | flag_supported = conf.check(fragment=''' | ||
1307 | 708 | #include <stdio.h> | ||
1308 | 709 | |||
1309 | 710 | int main(void) | ||
1310 | 711 | { | ||
1311 | 712 | char t[100000]; | ||
1312 | 713 | while (fgets(t, sizeof(t), stdin)); | ||
1313 | 714 | return 0; | ||
1314 | 715 | } | ||
1315 | 716 | ''', | ||
1316 | 717 | execute=0, | ||
1317 | 718 | cflags=[ '-Werror', '-fstack-clash-protection'], | ||
1318 | 719 | mandatory=False, | ||
1319 | 720 | msg='Checking if compiler accepts -fstack-clash-protection') | ||
1320 | 721 | if flag_supported: | ||
1321 | 722 | conf.ADD_CFLAGS('-fstack-clash-protection') | ||
1322 | 694 | 723 | ||
1323 | 695 | if Options.options.debug: | 724 | if Options.options.debug: |
1324 | 696 | conf.ADD_CFLAGS('-g', testflags=True) | 725 | conf.ADD_CFLAGS('-g', testflags=True) |
1325 | @@ -774,9 +803,12 @@ int main(void) { | |||
1326 | 774 | conf.env['EXTRA_LDFLAGS'].extend(conf.env['ADDITIONAL_LDFLAGS']) | 803 | conf.env['EXTRA_LDFLAGS'].extend(conf.env['ADDITIONAL_LDFLAGS']) |
1327 | 775 | 804 | ||
1328 | 776 | if path is None: | 805 | if path is None: |
1330 | 777 | conf.write_config_header('config.h', top=True) | 806 | conf.write_config_header('default/config.h', top=True, remove=False) |
1331 | 778 | else: | 807 | else: |
1333 | 779 | conf.write_config_header(path) | 808 | conf.write_config_header(os.path.join(conf.variant, path), remove=False) |
1334 | 809 | for key in conf.env.define_key: | ||
1335 | 810 | conf.undefine(key, from_env=False) | ||
1336 | 811 | conf.env.define_key = [] | ||
1337 | 780 | conf.SAMBA_CROSS_CHECK_COMPLETE() | 812 | conf.SAMBA_CROSS_CHECK_COMPLETE() |
1338 | 781 | 813 | ||
1339 | 782 | 814 | ||
1340 | @@ -863,9 +895,6 @@ def CHECK_CC_ENV(conf): | |||
1341 | 863 | The build farm sometimes puts a space at the start""" | 895 | The build farm sometimes puts a space at the start""" |
1342 | 864 | if os.environ.get('CC'): | 896 | if os.environ.get('CC'): |
1343 | 865 | conf.env.CC = TO_LIST(os.environ.get('CC')) | 897 | conf.env.CC = TO_LIST(os.environ.get('CC')) |
1344 | 866 | if len(conf.env.CC) == 1: | ||
1345 | 867 | # make for nicer logs if just a single command | ||
1346 | 868 | conf.env.CC = conf.env.CC[0] | ||
1347 | 869 | 898 | ||
1348 | 870 | 899 | ||
1349 | 871 | @conf | 900 | @conf |
1350 | @@ -875,7 +904,7 @@ def SETUP_CONFIGURE_CACHE(conf, enable): | |||
1351 | 875 | # when -C is chosen, we will use a private cache and will | 904 | # when -C is chosen, we will use a private cache and will |
1352 | 876 | # not look into system includes. This roughtly matches what | 905 | # not look into system includes. This roughtly matches what |
1353 | 877 | # autoconf does with -C | 906 | # autoconf does with -C |
1355 | 878 | cache_path = os.path.join(conf.blddir, '.confcache') | 907 | cache_path = os.path.join(conf.bldnode.abspath(), '.confcache') |
1356 | 879 | mkdir_p(cache_path) | 908 | mkdir_p(cache_path) |
1357 | 880 | Options.cache_global = os.environ['WAFCACHE'] = cache_path | 909 | Options.cache_global = os.environ['WAFCACHE'] = cache_path |
1358 | 881 | else: | 910 | else: |
1359 | @@ -899,6 +928,3 @@ def SAMBA_CHECK_UNDEFINED_SYMBOL_FLAGS(conf): | |||
1360 | 899 | if conf.CHECK_LDFLAGS(['-undefined', 'dynamic_lookup']): | 928 | if conf.CHECK_LDFLAGS(['-undefined', 'dynamic_lookup']): |
1361 | 900 | conf.env.undefined_ignore_ldflags = ['-undefined', 'dynamic_lookup'] | 929 | conf.env.undefined_ignore_ldflags = ['-undefined', 'dynamic_lookup'] |
1362 | 901 | 930 | ||
1363 | 902 | @conf | ||
1364 | 903 | def CHECK_CFG(self, *k, **kw): | ||
1365 | 904 | return self.check_cfg(*k, **kw) | ||
1366 | diff --git a/buildtools/wafsamba/samba_autoproto.py b/buildtools/wafsamba/samba_autoproto.py | |||
1367 | index b2b5233..ace434f 100644 | |||
1368 | --- a/buildtools/wafsamba/samba_autoproto.py | |||
1369 | +++ b/buildtools/wafsamba/samba_autoproto.py | |||
1370 | @@ -1,13 +1,13 @@ | |||
1371 | 1 | # waf build tool for building automatic prototypes from C source | 1 | # waf build tool for building automatic prototypes from C source |
1372 | 2 | 2 | ||
1373 | 3 | import os | 3 | import os |
1375 | 4 | import Build | 4 | from waflib import Build |
1376 | 5 | from samba_utils import SET_TARGET_TYPE, os_path_relpath | 5 | from samba_utils import SET_TARGET_TYPE, os_path_relpath |
1377 | 6 | 6 | ||
1378 | 7 | def SAMBA_AUTOPROTO(bld, header, source): | 7 | def SAMBA_AUTOPROTO(bld, header, source): |
1379 | 8 | '''rule for samba prototype generation''' | 8 | '''rule for samba prototype generation''' |
1380 | 9 | bld.SET_BUILD_GROUP('prototypes') | 9 | bld.SET_BUILD_GROUP('prototypes') |
1382 | 10 | relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath()) | 10 | relpath = os_path_relpath(bld.path.abspath(), bld.srcnode.abspath()) |
1383 | 11 | name = os.path.join(relpath, header) | 11 | name = os.path.join(relpath, header) |
1384 | 12 | SET_TARGET_TYPE(bld, name, 'PROTOTYPE') | 12 | SET_TARGET_TYPE(bld, name, 'PROTOTYPE') |
1385 | 13 | t = bld( | 13 | t = bld( |
1386 | @@ -16,7 +16,7 @@ def SAMBA_AUTOPROTO(bld, header, source): | |||
1387 | 16 | target = header, | 16 | target = header, |
1388 | 17 | update_outputs=True, | 17 | update_outputs=True, |
1389 | 18 | ext_out='.c', | 18 | ext_out='.c', |
1391 | 19 | before ='cc', | 19 | before ='c', |
1392 | 20 | rule = '${PERL} "${SCRIPT}/mkproto.pl" --srcdir=.. --builddir=. --public=/dev/null --private="${TGT}" ${SRC}' | 20 | rule = '${PERL} "${SCRIPT}/mkproto.pl" --srcdir=.. --builddir=. --public=/dev/null --private="${TGT}" ${SRC}' |
1393 | 21 | ) | 21 | ) |
1394 | 22 | t.env.SCRIPT = os.path.join(bld.srcnode.abspath(), 'source4/script') | 22 | t.env.SCRIPT = os.path.join(bld.srcnode.abspath(), 'source4/script') |
1395 | diff --git a/buildtools/wafsamba/samba_bundled.py b/buildtools/wafsamba/samba_bundled.py | |||
1396 | index 253d604..60ce7da 100644 | |||
1397 | --- a/buildtools/wafsamba/samba_bundled.py | |||
1398 | +++ b/buildtools/wafsamba/samba_bundled.py | |||
1399 | @@ -1,9 +1,9 @@ | |||
1400 | 1 | # functions to support bundled libraries | 1 | # functions to support bundled libraries |
1401 | 2 | 2 | ||
1402 | 3 | import sys | 3 | import sys |
1406 | 4 | import Build, Options, Logs | 4 | from waflib import Build, Options, Logs |
1407 | 5 | from Configure import conf | 5 | from waflib.Configure import conf |
1408 | 6 | from samba_utils import TO_LIST | 6 | from wafsamba import samba_utils |
1409 | 7 | 7 | ||
1410 | 8 | def PRIVATE_NAME(bld, name, private_extension, private_library): | 8 | def PRIVATE_NAME(bld, name, private_extension, private_library): |
1411 | 9 | '''possibly rename a library to include a bundled extension''' | 9 | '''possibly rename a library to include a bundled extension''' |
1412 | @@ -51,19 +51,19 @@ Build.BuildContext.BUILTIN_LIBRARY = BUILTIN_LIBRARY | |||
1413 | 51 | 51 | ||
1414 | 52 | def BUILTIN_DEFAULT(opt, builtins): | 52 | def BUILTIN_DEFAULT(opt, builtins): |
1415 | 53 | '''set a comma separated default list of builtin libraries for this package''' | 53 | '''set a comma separated default list of builtin libraries for this package''' |
1417 | 54 | if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options: | 54 | if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options.__dict__: |
1418 | 55 | return | 55 | return |
1421 | 56 | Options.options['BUILTIN_LIBRARIES_DEFAULT'] = builtins | 56 | Options.options.__dict__['BUILTIN_LIBRARIES_DEFAULT'] = builtins |
1422 | 57 | Options.Handler.BUILTIN_DEFAULT = BUILTIN_DEFAULT | 57 | Options.OptionsContext.BUILTIN_DEFAULT = BUILTIN_DEFAULT |
1423 | 58 | 58 | ||
1424 | 59 | 59 | ||
1425 | 60 | def PRIVATE_EXTENSION_DEFAULT(opt, extension, noextension=''): | 60 | def PRIVATE_EXTENSION_DEFAULT(opt, extension, noextension=''): |
1426 | 61 | '''set a default private library extension''' | 61 | '''set a default private library extension''' |
1428 | 62 | if 'PRIVATE_EXTENSION_DEFAULT' in Options.options: | 62 | if 'PRIVATE_EXTENSION_DEFAULT' in Options.options.__dict__: |
1429 | 63 | return | 63 | return |
1433 | 64 | Options.options['PRIVATE_EXTENSION_DEFAULT'] = extension | 64 | Options.options.__dict__['PRIVATE_EXTENSION_DEFAULT'] = extension |
1434 | 65 | Options.options['PRIVATE_EXTENSION_EXCEPTION'] = noextension | 65 | Options.options.__dict__['PRIVATE_EXTENSION_EXCEPTION'] = noextension |
1435 | 66 | Options.Handler.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT | 66 | Options.OptionsContext.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT |
1436 | 67 | 67 | ||
1437 | 68 | 68 | ||
1438 | 69 | def minimum_library_version(conf, libname, default): | 69 | def minimum_library_version(conf, libname, default): |
1439 | @@ -139,7 +139,7 @@ def CHECK_BUNDLED_SYSTEM(conf, libname, minversion='0.0.0', | |||
1440 | 139 | # We always do a logic validation of 'onlyif' first | 139 | # We always do a logic validation of 'onlyif' first |
1441 | 140 | missing = [] | 140 | missing = [] |
1442 | 141 | if onlyif: | 141 | if onlyif: |
1444 | 142 | for l in TO_LIST(onlyif): | 142 | for l in samba_utils.TO_LIST(onlyif): |
1445 | 143 | f = 'FOUND_SYSTEMLIB_%s' % l | 143 | f = 'FOUND_SYSTEMLIB_%s' % l |
1446 | 144 | if not f in conf.env: | 144 | if not f in conf.env: |
1447 | 145 | Logs.error('ERROR: CHECK_BUNDLED_SYSTEM(%s) - ' % (libname) + | 145 | Logs.error('ERROR: CHECK_BUNDLED_SYSTEM(%s) - ' % (libname) + |
1448 | diff --git a/buildtools/wafsamba/samba_conftests.py b/buildtools/wafsamba/samba_conftests.py | |||
1449 | index b52727b..ef632ba 100644 | |||
1450 | --- a/buildtools/wafsamba/samba_conftests.py | |||
1451 | +++ b/buildtools/wafsamba/samba_conftests.py | |||
1452 | @@ -2,34 +2,35 @@ | |||
1453 | 2 | # to test for commonly needed configuration options | 2 | # to test for commonly needed configuration options |
1454 | 3 | 3 | ||
1455 | 4 | import os, shutil, re | 4 | import os, shutil, re |
1459 | 5 | import Build, Configure, Utils, Options, Logs | 5 | from waflib import Build, Configure, Utils, Options, Logs, Errors |
1460 | 6 | from Configure import conf | 6 | from waflib.Configure import conf |
1461 | 7 | from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH | 7 | from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH, get_string |
1462 | 8 | 8 | ||
1463 | 9 | 9 | ||
1464 | 10 | def add_option(self, *k, **kw): | 10 | def add_option(self, *k, **kw): |
1465 | 11 | '''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests''' | 11 | '''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests''' |
1466 | 12 | Options.OptionsContext.parser = self | ||
1467 | 12 | match = kw.get('match', []) | 13 | match = kw.get('match', []) |
1468 | 13 | if match: | 14 | if match: |
1469 | 14 | del kw['match'] | 15 | del kw['match'] |
1470 | 15 | opt = self.parser.add_option(*k, **kw) | 16 | opt = self.parser.add_option(*k, **kw) |
1471 | 16 | opt.match = match | 17 | opt.match = match |
1472 | 17 | return opt | 18 | return opt |
1474 | 18 | Options.Handler.add_option = add_option | 19 | Options.OptionsContext.add_option = add_option |
1475 | 19 | 20 | ||
1476 | 20 | @conf | 21 | @conf |
1477 | 21 | def check(self, *k, **kw): | 22 | def check(self, *k, **kw): |
1478 | 22 | '''Override the waf defaults to inject --with-directory options''' | 23 | '''Override the waf defaults to inject --with-directory options''' |
1479 | 23 | 24 | ||
1480 | 24 | if not 'env' in kw: | 25 | if not 'env' in kw: |
1482 | 25 | kw['env'] = self.env.copy() | 26 | kw['env'] = self.env.derive() |
1483 | 26 | 27 | ||
1484 | 27 | # match the configuration test with specific options, for example: | 28 | # match the configuration test with specific options, for example: |
1485 | 28 | # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv" | 29 | # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv" |
1486 | 29 | additional_dirs = [] | 30 | additional_dirs = [] |
1487 | 30 | if 'msg' in kw: | 31 | if 'msg' in kw: |
1488 | 31 | msg = kw['msg'] | 32 | msg = kw['msg'] |
1490 | 32 | for x in Options.Handler.parser.parser.option_list: | 33 | for x in Options.OptionsContext.parser.parser.option_list: |
1491 | 33 | if getattr(x, 'match', None) and msg in x.match: | 34 | if getattr(x, 'match', None) and msg in x.match: |
1492 | 34 | d = getattr(Options.options, x.dest, '') | 35 | d = getattr(Options.options, x.dest, '') |
1493 | 35 | if d: | 36 | if d: |
1494 | @@ -46,12 +47,12 @@ def check(self, *k, **kw): | |||
1495 | 46 | add_options_dir(additional_dirs, kw['env']) | 47 | add_options_dir(additional_dirs, kw['env']) |
1496 | 47 | 48 | ||
1497 | 48 | self.validate_c(kw) | 49 | self.validate_c(kw) |
1499 | 49 | self.check_message_1(kw['msg']) | 50 | self.start_msg(kw['msg']) |
1500 | 50 | ret = None | 51 | ret = None |
1501 | 51 | try: | 52 | try: |
1502 | 52 | ret = self.run_c_code(*k, **kw) | 53 | ret = self.run_c_code(*k, **kw) |
1503 | 53 | except Configure.ConfigurationError as e: | 54 | except Configure.ConfigurationError as e: |
1505 | 54 | self.check_message_2(kw['errmsg'], 'YELLOW') | 55 | self.end_msg(kw['errmsg'], 'YELLOW') |
1506 | 55 | if 'mandatory' in kw and kw['mandatory']: | 56 | if 'mandatory' in kw and kw['mandatory']: |
1507 | 56 | if Logs.verbose > 1: | 57 | if Logs.verbose > 1: |
1508 | 57 | raise | 58 | raise |
1509 | @@ -59,7 +60,7 @@ def check(self, *k, **kw): | |||
1510 | 59 | self.fatal('the configuration failed (see %r)' % self.log.name) | 60 | self.fatal('the configuration failed (see %r)' % self.log.name) |
1511 | 60 | else: | 61 | else: |
1512 | 61 | kw['success'] = ret | 62 | kw['success'] = ret |
1514 | 62 | self.check_message_2(self.ret_msg(kw['okmsg'], kw)) | 63 | self.end_msg(self.ret_msg(kw['okmsg'], kw)) |
1515 | 63 | 64 | ||
1516 | 64 | # success! keep the CPPPATH/LIBPATH | 65 | # success! keep the CPPPATH/LIBPATH |
1517 | 65 | add_options_dir(additional_dirs, self.env) | 66 | add_options_dir(additional_dirs, self.env) |
1518 | @@ -85,7 +86,7 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'): | |||
1519 | 85 | '''see what we need for largefile support''' | 86 | '''see what we need for largefile support''' |
1520 | 86 | getconf_cflags = conf.CHECK_COMMAND(['getconf', 'LFS_CFLAGS']); | 87 | getconf_cflags = conf.CHECK_COMMAND(['getconf', 'LFS_CFLAGS']); |
1521 | 87 | if getconf_cflags is not False: | 88 | if getconf_cflags is not False: |
1523 | 88 | if (conf.CHECK_CODE('return !(sizeof(off_t) >= 8)', | 89 | if (conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1', |
1524 | 89 | define='WORKING_GETCONF_LFS_CFLAGS', | 90 | define='WORKING_GETCONF_LFS_CFLAGS', |
1525 | 90 | execute=True, | 91 | execute=True, |
1526 | 91 | cflags=getconf_cflags, | 92 | cflags=getconf_cflags, |
1527 | @@ -100,13 +101,13 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'): | |||
1528 | 100 | else: | 101 | else: |
1529 | 101 | conf.DEFINE(flag_split[0], flag_split[1]) | 102 | conf.DEFINE(flag_split[0], flag_split[1]) |
1530 | 102 | 103 | ||
1532 | 103 | if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)', | 104 | if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1', |
1533 | 104 | define, | 105 | define, |
1534 | 105 | execute=True, | 106 | execute=True, |
1535 | 106 | msg='Checking for large file support without additional flags'): | 107 | msg='Checking for large file support without additional flags'): |
1536 | 107 | return True | 108 | return True |
1537 | 108 | 109 | ||
1539 | 109 | if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)', | 110 | if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1', |
1540 | 110 | define, | 111 | define, |
1541 | 111 | execute=True, | 112 | execute=True, |
1542 | 112 | cflags='-D_FILE_OFFSET_BITS=64', | 113 | cflags='-D_FILE_OFFSET_BITS=64', |
1543 | @@ -114,7 +115,7 @@ def CHECK_LARGEFILE(conf, define='HAVE_LARGEFILE'): | |||
1544 | 114 | conf.DEFINE('_FILE_OFFSET_BITS', 64) | 115 | conf.DEFINE('_FILE_OFFSET_BITS', 64) |
1545 | 115 | return True | 116 | return True |
1546 | 116 | 117 | ||
1548 | 117 | if conf.CHECK_CODE('return !(sizeof(off_t) >= 8)', | 118 | if conf.CHECK_CODE('if (sizeof(off_t) < 8) return 1', |
1549 | 118 | define, | 119 | define, |
1550 | 119 | execute=True, | 120 | execute=True, |
1551 | 120 | cflags='-D_LARGE_FILES', | 121 | cflags='-D_LARGE_FILES', |
1552 | @@ -162,7 +163,7 @@ def find_config_dir(conf): | |||
1553 | 162 | '''find a directory to run tests in''' | 163 | '''find a directory to run tests in''' |
1554 | 163 | k = 0 | 164 | k = 0 |
1555 | 164 | while k < 10000: | 165 | while k < 10000: |
1557 | 165 | dir = os.path.join(conf.blddir, '.conf_check_%d' % k) | 166 | dir = os.path.join(conf.bldnode.abspath(), '.conf_check_%d' % k) |
1558 | 166 | try: | 167 | try: |
1559 | 167 | shutil.rmtree(dir) | 168 | shutil.rmtree(dir) |
1560 | 168 | except OSError: | 169 | except OSError: |
1561 | @@ -257,7 +258,8 @@ int foo(int v) { | |||
1562 | 257 | environ[0] = 1; | 258 | environ[0] = 1; |
1563 | 258 | ldb_module = PyImport_ImportModule("ldb"); | 259 | ldb_module = PyImport_ImportModule("ldb"); |
1564 | 259 | return v * 2; | 260 | return v * 2; |
1566 | 260 | }''' | 261 | } |
1567 | 262 | ''' | ||
1568 | 261 | return conf.check(features='c cshlib',uselib='PYEMBED',fragment=snip,msg=msg, mandatory=False) | 263 | return conf.check(features='c cshlib',uselib='PYEMBED',fragment=snip,msg=msg, mandatory=False) |
1569 | 262 | 264 | ||
1570 | 263 | # this one is quite complex, and should probably be broken up | 265 | # this one is quite complex, and should probably be broken up |
1571 | @@ -337,7 +339,8 @@ def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None): | |||
1572 | 337 | 339 | ||
1573 | 338 | # we need to run the program, try to get its result | 340 | # we need to run the program, try to get its result |
1574 | 339 | args = conf.SAMBA_CROSS_ARGS(msg=msg) | 341 | args = conf.SAMBA_CROSS_ARGS(msg=msg) |
1576 | 340 | proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) | 342 | proc = Utils.subprocess.Popen([lastprog] + args, |
1577 | 343 | stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE) | ||
1578 | 341 | (out, err) = proc.communicate() | 344 | (out, err) = proc.communicate() |
1579 | 342 | w = conf.log.write | 345 | w = conf.log.write |
1580 | 343 | w(str(out)) | 346 | w(str(out)) |
1581 | @@ -364,7 +367,7 @@ def CHECK_PERL_MANPAGE(conf, msg=None, section=None): | |||
1582 | 364 | else: | 367 | else: |
1583 | 365 | msg = "perl manpage generation" | 368 | msg = "perl manpage generation" |
1584 | 366 | 369 | ||
1586 | 367 | conf.check_message_1(msg) | 370 | conf.start_msg(msg) |
1587 | 368 | 371 | ||
1588 | 369 | dir = find_config_dir(conf) | 372 | dir = find_config_dir(conf) |
1589 | 370 | 373 | ||
1590 | @@ -381,28 +384,28 @@ WriteMakefile( | |||
1591 | 381 | """) | 384 | """) |
1592 | 382 | back = os.path.abspath('.') | 385 | back = os.path.abspath('.') |
1593 | 383 | os.chdir(bdir) | 386 | os.chdir(bdir) |
1597 | 384 | proc = Utils.pproc.Popen(['perl', 'Makefile.PL'], | 387 | proc = Utils.subprocess.Popen(['perl', 'Makefile.PL'], |
1598 | 385 | stdout=Utils.pproc.PIPE, | 388 | stdout=Utils.subprocess.PIPE, |
1599 | 386 | stderr=Utils.pproc.PIPE) | 389 | stderr=Utils.subprocess.PIPE) |
1600 | 387 | (out, err) = proc.communicate() | 390 | (out, err) = proc.communicate() |
1601 | 388 | os.chdir(back) | 391 | os.chdir(back) |
1602 | 389 | 392 | ||
1603 | 390 | ret = (proc.returncode == 0) | 393 | ret = (proc.returncode == 0) |
1604 | 391 | if not ret: | 394 | if not ret: |
1606 | 392 | conf.check_message_2('not found', color='YELLOW') | 395 | conf.end_msg('not found', color='YELLOW') |
1607 | 393 | return | 396 | return |
1608 | 394 | 397 | ||
1609 | 395 | if section: | 398 | if section: |
1610 | 396 | man = Utils.readf(os.path.join(bdir,'Makefile')) | 399 | man = Utils.readf(os.path.join(bdir,'Makefile')) |
1611 | 397 | m = re.search('MAN%sEXT\s+=\s+(\w+)' % section, man) | 400 | m = re.search('MAN%sEXT\s+=\s+(\w+)' % section, man) |
1612 | 398 | if not m: | 401 | if not m: |
1614 | 399 | conf.check_message_2('not found', color='YELLOW') | 402 | conf.end_msg('not found', color='YELLOW') |
1615 | 400 | return | 403 | return |
1616 | 401 | ext = m.group(1) | 404 | ext = m.group(1) |
1618 | 402 | conf.check_message_2(ext) | 405 | conf.end_msg(ext) |
1619 | 403 | return ext | 406 | return ext |
1620 | 404 | 407 | ||
1622 | 405 | conf.check_message_2('ok') | 408 | conf.end_msg('ok') |
1623 | 406 | return True | 409 | return True |
1624 | 407 | 410 | ||
1625 | 408 | 411 | ||
1626 | @@ -416,7 +419,7 @@ def CHECK_COMMAND(conf, cmd, msg=None, define=None, on_target=True, boolean=Fals | |||
1627 | 416 | if on_target: | 419 | if on_target: |
1628 | 417 | cmd.extend(conf.SAMBA_CROSS_ARGS(msg=msg)) | 420 | cmd.extend(conf.SAMBA_CROSS_ARGS(msg=msg)) |
1629 | 418 | try: | 421 | try: |
1631 | 419 | ret = Utils.cmd_output(cmd) | 422 | ret = get_string(Utils.cmd_output(cmd)) |
1632 | 420 | except: | 423 | except: |
1633 | 421 | conf.COMPOUND_END(False) | 424 | conf.COMPOUND_END(False) |
1634 | 422 | return False | 425 | return False |
1635 | @@ -461,7 +464,7 @@ def CHECK_INLINE(conf): | |||
1636 | 461 | ret = conf.CHECK_CODE(''' | 464 | ret = conf.CHECK_CODE(''' |
1637 | 462 | typedef int foo_t; | 465 | typedef int foo_t; |
1638 | 463 | static %s foo_t static_foo () {return 0; } | 466 | static %s foo_t static_foo () {return 0; } |
1640 | 464 | %s foo_t foo () {return 0; }''' % (i, i), | 467 | %s foo_t foo () {return 0; }\n''' % (i, i), |
1641 | 465 | define='INLINE_MACRO', | 468 | define='INLINE_MACRO', |
1642 | 466 | addmain=False, | 469 | addmain=False, |
1643 | 467 | link=False) | 470 | link=False) |
1644 | @@ -486,13 +489,13 @@ def CHECK_XSLTPROC_MANPAGES(conf): | |||
1645 | 486 | return False | 489 | return False |
1646 | 487 | 490 | ||
1647 | 488 | s='http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl' | 491 | s='http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl' |
1649 | 489 | conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.XSLTPROC, s), | 492 | conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.get_flat('XSLTPROC'), s), |
1650 | 490 | msg='Checking for stylesheet %s' % s, | 493 | msg='Checking for stylesheet %s' % s, |
1651 | 491 | define='XSLTPROC_MANPAGES', on_target=False, | 494 | define='XSLTPROC_MANPAGES', on_target=False, |
1652 | 492 | boolean=True) | 495 | boolean=True) |
1653 | 493 | if not conf.CONFIG_SET('XSLTPROC_MANPAGES'): | 496 | if not conf.CONFIG_SET('XSLTPROC_MANPAGES'): |
1656 | 494 | print "A local copy of the docbook.xsl wasn't found on your system" \ | 497 | print("A local copy of the docbook.xsl wasn't found on your system" \ |
1657 | 495 | " consider installing package like docbook-xsl" | 498 | " consider installing package like docbook-xsl") |
1658 | 496 | 499 | ||
1659 | 497 | # | 500 | # |
1660 | 498 | # Determine the standard libpath for the used compiler, | 501 | # Determine the standard libpath for the used compiler, |
1661 | @@ -506,12 +509,12 @@ def CHECK_STANDARD_LIBPATH(conf): | |||
1662 | 506 | # at least gcc and clang support this: | 509 | # at least gcc and clang support this: |
1663 | 507 | try: | 510 | try: |
1664 | 508 | cmd = conf.env.CC + ['-print-search-dirs'] | 511 | cmd = conf.env.CC + ['-print-search-dirs'] |
1666 | 509 | out = Utils.cmd_output(cmd).split('\n') | 512 | out = get_string(Utils.cmd_output(cmd)).split('\n') |
1667 | 510 | except ValueError: | 513 | except ValueError: |
1668 | 511 | # option not supported by compiler - use a standard list of directories | 514 | # option not supported by compiler - use a standard list of directories |
1669 | 512 | dirlist = [ '/usr/lib', '/usr/lib64' ] | 515 | dirlist = [ '/usr/lib', '/usr/lib64' ] |
1670 | 513 | except: | 516 | except: |
1672 | 514 | raise Utils.WafError('Unexpected error running "%s"' % (cmd)) | 517 | raise Errors.WafError('Unexpected error running "%s"' % (cmd)) |
1673 | 515 | else: | 518 | else: |
1674 | 516 | dirlist = [] | 519 | dirlist = [] |
1675 | 517 | for line in out: | 520 | for line in out: |
1676 | diff --git a/buildtools/wafsamba/samba_cross.py b/buildtools/wafsamba/samba_cross.py | |||
1677 | index b8f2000..8863c2c 100644 | |||
1678 | --- a/buildtools/wafsamba/samba_cross.py | |||
1679 | +++ b/buildtools/wafsamba/samba_cross.py | |||
1680 | @@ -1,8 +1,9 @@ | |||
1681 | 1 | # functions for handling cross-compilation | 1 | # functions for handling cross-compilation |
1682 | 2 | 2 | ||
1683 | 3 | import os, sys, re, shlex | 3 | import os, sys, re, shlex |
1686 | 4 | import Utils, Logs, Options | 4 | from waflib import Utils, Logs, Options, Errors, Context |
1687 | 5 | from Configure import conf | 5 | from waflib.Configure import conf |
1688 | 6 | from wafsamba import samba_utils | ||
1689 | 6 | 7 | ||
1690 | 7 | real_Popen = None | 8 | real_Popen = None |
1691 | 8 | 9 | ||
1692 | @@ -81,12 +82,12 @@ def cross_answer(ca_file, msg): | |||
1693 | 81 | f.close() | 82 | f.close() |
1694 | 82 | return (int(m.group(1)), m.group(2)) | 83 | return (int(m.group(1)), m.group(2)) |
1695 | 83 | else: | 84 | else: |
1697 | 84 | raise Utils.WafError("Bad answer format '%s' in %s" % (line, ca_file)) | 85 | raise Errors.WafError("Bad answer format '%s' in %s" % (line, ca_file)) |
1698 | 85 | f.close() | 86 | f.close() |
1699 | 86 | return ANSWER_UNKNOWN | 87 | return ANSWER_UNKNOWN |
1700 | 87 | 88 | ||
1701 | 88 | 89 | ||
1703 | 89 | class cross_Popen(Utils.pproc.Popen): | 90 | class cross_Popen(Utils.subprocess.Popen): |
1704 | 90 | '''cross-compilation wrapper for Popen''' | 91 | '''cross-compilation wrapper for Popen''' |
1705 | 91 | def __init__(*k, **kw): | 92 | def __init__(*k, **kw): |
1706 | 92 | (obj, args) = k | 93 | (obj, args) = k |
1707 | @@ -118,10 +119,10 @@ class cross_Popen(Utils.pproc.Popen): | |||
1708 | 118 | newargs.extend(args[0:i]) | 119 | newargs.extend(args[0:i]) |
1709 | 119 | if use_answers: | 120 | if use_answers: |
1710 | 120 | p = real_Popen(newargs, | 121 | p = real_Popen(newargs, |
1713 | 121 | stdout=Utils.pproc.PIPE, | 122 | stdout=Utils.subprocess.PIPE, |
1714 | 122 | stderr=Utils.pproc.PIPE) | 123 | stderr=Utils.subprocess.PIPE) |
1715 | 123 | ce_out, ce_err = p.communicate() | 124 | ce_out, ce_err = p.communicate() |
1717 | 124 | ans = (p.returncode, ce_out) | 125 | ans = (p.returncode, samba_utils.get_string(ce_out)) |
1718 | 125 | add_answer(ca_file, msg, ans) | 126 | add_answer(ca_file, msg, ans) |
1719 | 126 | else: | 127 | else: |
1720 | 127 | args = newargs | 128 | args = newargs |
1721 | @@ -144,8 +145,8 @@ def SAMBA_CROSS_ARGS(conf, msg=None): | |||
1722 | 144 | 145 | ||
1723 | 145 | global real_Popen | 146 | global real_Popen |
1724 | 146 | if real_Popen is None: | 147 | if real_Popen is None: |
1727 | 147 | real_Popen = Utils.pproc.Popen | 148 | real_Popen = Utils.subprocess.Popen |
1728 | 148 | Utils.pproc.Popen = cross_Popen | 149 | Utils.subprocess.Popen = cross_Popen |
1729 | 149 | 150 | ||
1730 | 150 | ret = [] | 151 | ret = [] |
1731 | 151 | 152 | ||
1732 | @@ -154,11 +155,11 @@ def SAMBA_CROSS_ARGS(conf, msg=None): | |||
1733 | 154 | 155 | ||
1734 | 155 | if conf.env.CROSS_ANSWERS: | 156 | if conf.env.CROSS_ANSWERS: |
1735 | 156 | if msg is None: | 157 | if msg is None: |
1738 | 157 | raise Utils.WafError("Cannot have NULL msg in cross-answers") | 158 | raise Errors.WafError("Cannot have NULL msg in cross-answers") |
1739 | 158 | ret.extend(['--cross-answers', os.path.join(Options.launch_dir, conf.env.CROSS_ANSWERS), msg]) | 159 | ret.extend(['--cross-answers', os.path.join(Context.launch_dir, conf.env.CROSS_ANSWERS), msg]) |
1740 | 159 | 160 | ||
1741 | 160 | if ret == []: | 161 | if ret == []: |
1743 | 161 | raise Utils.WafError("Cannot cross-compile without either --cross-execute or --cross-answers") | 162 | raise Errors.WafError("Cannot cross-compile without either --cross-execute or --cross-answers") |
1744 | 162 | 163 | ||
1745 | 163 | return ret | 164 | return ret |
1746 | 164 | 165 | ||
1747 | @@ -167,5 +168,5 @@ def SAMBA_CROSS_CHECK_COMPLETE(conf): | |||
1748 | 167 | '''check if we have some unanswered questions''' | 168 | '''check if we have some unanswered questions''' |
1749 | 168 | global cross_answers_incomplete | 169 | global cross_answers_incomplete |
1750 | 169 | if conf.env.CROSS_COMPILE and cross_answers_incomplete: | 170 | if conf.env.CROSS_COMPILE and cross_answers_incomplete: |
1752 | 170 | raise Utils.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS) | 171 | raise Errors.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS) |
1753 | 171 | return True | 172 | return True |
1754 | diff --git a/buildtools/wafsamba/samba_deps.py b/buildtools/wafsamba/samba_deps.py | |||
1755 | index 978a5e9..f8c3880 100644 | |||
1756 | --- a/buildtools/wafsamba/samba_deps.py | |||
1757 | +++ b/buildtools/wafsamba/samba_deps.py | |||
1758 | @@ -2,9 +2,10 @@ | |||
1759 | 2 | 2 | ||
1760 | 3 | import os, sys, re, time | 3 | import os, sys, re, time |
1761 | 4 | 4 | ||
1765 | 5 | import Build, Environment, Options, Logs, Utils | 5 | from waflib import Build, Options, Logs, Utils, Errors |
1766 | 6 | from Logs import debug | 6 | from waflib.Logs import debug |
1767 | 7 | from Configure import conf | 7 | from waflib.Configure import conf |
1768 | 8 | from waflib import ConfigSet | ||
1769 | 8 | 9 | ||
1770 | 9 | from samba_bundled import BUILTIN_LIBRARY | 10 | from samba_bundled import BUILTIN_LIBRARY |
1771 | 10 | from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list, os_path_relpath | 11 | from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list, os_path_relpath |
1772 | @@ -85,7 +86,7 @@ def build_dependencies(self): | |||
1773 | 85 | # extra link flags from pkg_config | 86 | # extra link flags from pkg_config |
1774 | 86 | libs = self.final_syslibs.copy() | 87 | libs = self.final_syslibs.copy() |
1775 | 87 | 88 | ||
1777 | 88 | (ccflags, ldflags, cpppath) = library_flags(self, list(libs)) | 89 | (cflags, ldflags, cpppath) = library_flags(self, list(libs)) |
1778 | 89 | new_ldflags = getattr(self, 'samba_ldflags', [])[:] | 90 | new_ldflags = getattr(self, 'samba_ldflags', [])[:] |
1779 | 90 | new_ldflags.extend(ldflags) | 91 | new_ldflags.extend(ldflags) |
1780 | 91 | self.ldflags = new_ldflags | 92 | self.ldflags = new_ldflags |
1781 | @@ -102,7 +103,7 @@ def build_dependencies(self): | |||
1782 | 102 | self.sname, self.uselib, self.uselib_local, self.add_objects) | 103 | self.sname, self.uselib, self.uselib_local, self.add_objects) |
1783 | 103 | 104 | ||
1784 | 104 | if self.samba_type in ['SUBSYSTEM']: | 105 | if self.samba_type in ['SUBSYSTEM']: |
1786 | 105 | # this is needed for the ccflags of libs that come from pkg_config | 106 | # this is needed for the cflags of libs that come from pkg_config |
1787 | 106 | self.uselib = list(self.final_syslibs) | 107 | self.uselib = list(self.final_syslibs) |
1788 | 107 | self.uselib.extend(list(self.direct_syslibs)) | 108 | self.uselib.extend(list(self.direct_syslibs)) |
1789 | 108 | for lib in self.final_libs: | 109 | for lib in self.final_libs: |
1790 | @@ -235,7 +236,7 @@ def add_init_functions(self): | |||
1791 | 235 | if sentinel == 'NULL': | 236 | if sentinel == 'NULL': |
1792 | 236 | proto = "extern void __%s_dummy_module_proto(void)" % (sname) | 237 | proto = "extern void __%s_dummy_module_proto(void)" % (sname) |
1793 | 237 | cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto)) | 238 | cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto)) |
1795 | 238 | self.ccflags = cflags | 239 | self.cflags = cflags |
1796 | 239 | return | 240 | return |
1797 | 240 | 241 | ||
1798 | 241 | for m in modules: | 242 | for m in modules: |
1799 | @@ -257,7 +258,7 @@ def add_init_functions(self): | |||
1800 | 257 | proto += '_MODULE_PROTO(%s)' % f | 258 | proto += '_MODULE_PROTO(%s)' % f |
1801 | 258 | proto += "extern void __%s_dummy_module_proto(void)" % (m) | 259 | proto += "extern void __%s_dummy_module_proto(void)" % (m) |
1802 | 259 | cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto)) | 260 | cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto)) |
1804 | 260 | self.ccflags = cflags | 261 | self.cflags = cflags |
1805 | 261 | 262 | ||
1806 | 262 | 263 | ||
1807 | 263 | def check_duplicate_sources(bld, tgt_list): | 264 | def check_duplicate_sources(bld, tgt_list): |
1808 | @@ -271,6 +272,9 @@ def check_duplicate_sources(bld, tgt_list): | |||
1809 | 271 | tpath = os.path.normpath(os_path_relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default')) | 272 | tpath = os.path.normpath(os_path_relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default')) |
1810 | 272 | obj_sources = set() | 273 | obj_sources = set() |
1811 | 273 | for s in source_list: | 274 | for s in source_list: |
1812 | 275 | if not isinstance(s, str): | ||
1813 | 276 | print('strange path in check_duplicate_sources %r' % s) | ||
1814 | 277 | s = s.abspath() | ||
1815 | 274 | p = os.path.normpath(os.path.join(tpath, s)) | 278 | p = os.path.normpath(os.path.join(tpath, s)) |
1816 | 275 | if p in obj_sources: | 279 | if p in obj_sources: |
1817 | 276 | Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname)) | 280 | Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname)) |
1818 | @@ -299,7 +303,7 @@ def check_duplicate_sources(bld, tgt_list): | |||
1819 | 299 | Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys())) | 303 | Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys())) |
1820 | 300 | for tname in subsystems[s]: | 304 | for tname in subsystems[s]: |
1821 | 301 | if len(subsystems[s][tname]) > 1: | 305 | if len(subsystems[s][tname]) > 1: |
1823 | 302 | raise Utils.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname])) | 306 | raise Errors.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname])) |
1824 | 303 | 307 | ||
1825 | 304 | return True | 308 | return True |
1826 | 305 | 309 | ||
1827 | @@ -372,7 +376,7 @@ def add_samba_attributes(bld, tgt_list): | |||
1828 | 372 | t.samba_abspath = t.path.abspath(bld.env) | 376 | t.samba_abspath = t.path.abspath(bld.env) |
1829 | 373 | t.samba_deps_extended = t.samba_deps[:] | 377 | t.samba_deps_extended = t.samba_deps[:] |
1830 | 374 | t.samba_includes_extended = TO_LIST(t.samba_includes)[:] | 378 | t.samba_includes_extended = TO_LIST(t.samba_includes)[:] |
1832 | 375 | t.ccflags = getattr(t, 'samba_cflags', '') | 379 | t.cflags = getattr(t, 'samba_cflags', '') |
1833 | 376 | 380 | ||
1834 | 377 | def replace_grouping_libraries(bld, tgt_list): | 381 | def replace_grouping_libraries(bld, tgt_list): |
1835 | 378 | '''replace dependencies based on grouping libraries | 382 | '''replace dependencies based on grouping libraries |
1836 | @@ -715,6 +719,11 @@ def reduce_objects(bld, tgt_list): | |||
1837 | 715 | if t.sname in rely_on: | 719 | if t.sname in rely_on: |
1838 | 716 | dup = dup.difference(rely_on[t.sname]) | 720 | dup = dup.difference(rely_on[t.sname]) |
1839 | 717 | if dup: | 721 | if dup: |
1840 | 722 | # Do not remove duplicates of BUILTINS | ||
1841 | 723 | d = next(iter(dup)) | ||
1842 | 724 | if BUILTIN_LIBRARY(bld, d): | ||
1843 | 725 | continue | ||
1844 | 726 | |||
1845 | 718 | debug('deps: removing dups from %s of type %s: %s also in %s %s', | 727 | debug('deps: removing dups from %s of type %s: %s also in %s %s', |
1846 | 719 | t.sname, t.samba_type, dup, t2.samba_type, l) | 728 | t.sname, t.samba_type, dup, t2.samba_type, l) |
1847 | 720 | new = new.difference(dup) | 729 | new = new.difference(dup) |
1848 | @@ -951,7 +960,7 @@ savedeps_inputs = ['samba_deps', 'samba_includes', 'local_include', 'local_incl | |||
1849 | 951 | 'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols', | 960 | 'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols', |
1850 | 952 | 'use_global_deps', 'global_include' ] | 961 | 'use_global_deps', 'global_include' ] |
1851 | 953 | savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes', | 962 | savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes', |
1853 | 954 | 'ccflags', 'ldflags', 'samba_deps_extended', 'final_libs'] | 963 | 'cflags', 'ldflags', 'samba_deps_extended', 'final_libs'] |
1854 | 955 | savedeps_outenv = ['INC_PATHS'] | 964 | savedeps_outenv = ['INC_PATHS'] |
1855 | 956 | savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ] | 965 | savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ] |
1856 | 957 | savedeps_caches = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS'] | 966 | savedeps_caches = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS'] |
1857 | @@ -960,7 +969,7 @@ savedeps_files = ['buildtools/wafsamba/samba_deps.py'] | |||
1858 | 960 | def save_samba_deps(bld, tgt_list): | 969 | def save_samba_deps(bld, tgt_list): |
1859 | 961 | '''save the dependency calculations between builds, to make | 970 | '''save the dependency calculations between builds, to make |
1860 | 962 | further builds faster''' | 971 | further builds faster''' |
1862 | 963 | denv = Environment.Environment() | 972 | denv = ConfigSet.ConfigSet() |
1863 | 964 | 973 | ||
1864 | 965 | denv.version = savedeps_version | 974 | denv.version = savedeps_version |
1865 | 966 | denv.savedeps_inputs = savedeps_inputs | 975 | denv.savedeps_inputs = savedeps_inputs |
1866 | @@ -1007,15 +1016,15 @@ def save_samba_deps(bld, tgt_list): | |||
1867 | 1007 | if tdeps != {}: | 1016 | if tdeps != {}: |
1868 | 1008 | denv.outenv[t.sname] = tdeps | 1017 | denv.outenv[t.sname] = tdeps |
1869 | 1009 | 1018 | ||
1871 | 1010 | depsfile = os.path.join(bld.bdir, "sambadeps") | 1019 | depsfile = os.path.join(bld.cache_dir, "sambadeps") |
1872 | 1011 | denv.store_fast(depsfile) | 1020 | denv.store_fast(depsfile) |
1873 | 1012 | 1021 | ||
1874 | 1013 | 1022 | ||
1875 | 1014 | 1023 | ||
1876 | 1015 | def load_samba_deps(bld, tgt_list): | 1024 | def load_samba_deps(bld, tgt_list): |
1877 | 1016 | '''load a previous set of build dependencies if possible''' | 1025 | '''load a previous set of build dependencies if possible''' |
1880 | 1017 | depsfile = os.path.join(bld.bdir, "sambadeps") | 1026 | depsfile = os.path.join(bld.cache_dir, "sambadeps") |
1881 | 1018 | denv = Environment.Environment() | 1027 | denv = ConfigSet.ConfigSet() |
1882 | 1019 | try: | 1028 | try: |
1883 | 1020 | debug('deps: checking saved dependencies') | 1029 | debug('deps: checking saved dependencies') |
1884 | 1021 | denv.load_fast(depsfile) | 1030 | denv.load_fast(depsfile) |
1885 | diff --git a/buildtools/wafsamba/samba_dist.py b/buildtools/wafsamba/samba_dist.py | |||
1886 | index 8d51632..6af7bb4 100644 | |||
1887 | --- a/buildtools/wafsamba/samba_dist.py | |||
1888 | +++ b/buildtools/wafsamba/samba_dist.py | |||
1889 | @@ -2,13 +2,41 @@ | |||
1890 | 2 | # uses git ls-files to get file lists | 2 | # uses git ls-files to get file lists |
1891 | 3 | 3 | ||
1892 | 4 | import os, sys, tarfile | 4 | import os, sys, tarfile |
1896 | 5 | import Utils, Scripting, Logs, Options | 5 | from waflib import Utils, Scripting, Logs, Options |
1897 | 6 | from Configure import conf | 6 | from waflib.Configure import conf |
1898 | 7 | from samba_utils import os_path_relpath | 7 | from samba_utils import os_path_relpath, get_string |
1899 | 8 | from waflib import Context | ||
1900 | 8 | 9 | ||
1901 | 9 | dist_dirs = None | 10 | dist_dirs = None |
1902 | 10 | dist_files = None | 11 | dist_files = None |
1903 | 11 | dist_blacklist = "" | 12 | dist_blacklist = "" |
1904 | 13 | dist_archive = None | ||
1905 | 14 | |||
1906 | 15 | class Dist(Context.Context): | ||
1907 | 16 | # TODO remove | ||
1908 | 17 | cmd = 'dist' | ||
1909 | 18 | fun = 'dist' | ||
1910 | 19 | def execute(self): | ||
1911 | 20 | Context.g_module.dist() | ||
1912 | 21 | |||
1913 | 22 | class DistCheck(Scripting.DistCheck): | ||
1914 | 23 | fun = 'distcheck' | ||
1915 | 24 | cmd = 'distcheck' | ||
1916 | 25 | def execute(self): | ||
1917 | 26 | Options.options.distcheck_args = '' | ||
1918 | 27 | if Context.g_module.distcheck is Scripting.distcheck: | ||
1919 | 28 | # default | ||
1920 | 29 | Context.g_module.distcheck(self) | ||
1921 | 30 | else: | ||
1922 | 31 | Context.g_module.distcheck() | ||
1923 | 32 | Context.g_module.dist() | ||
1924 | 33 | self.check() | ||
1925 | 34 | def get_arch_name(self): | ||
1926 | 35 | global dist_archive | ||
1927 | 36 | return dist_archive | ||
1928 | 37 | def make_distcheck_cmd(self, tmpdir): | ||
1929 | 38 | waf = os.path.abspath(sys.argv[0]) | ||
1930 | 39 | return [sys.executable, waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] | ||
1931 | 12 | 40 | ||
1932 | 13 | def add_symlink(tar, fname, abspath, basedir): | 41 | def add_symlink(tar, fname, abspath, basedir): |
1933 | 14 | '''handle symlinks to directories that may move during packaging''' | 42 | '''handle symlinks to directories that may move during packaging''' |
1934 | @@ -69,7 +97,7 @@ def add_tarfile(tar, fname, abspath, basedir): | |||
1935 | 69 | tinfo.gid = 0 | 97 | tinfo.gid = 0 |
1936 | 70 | tinfo.uname = 'root' | 98 | tinfo.uname = 'root' |
1937 | 71 | tinfo.gname = 'root' | 99 | tinfo.gname = 'root' |
1939 | 72 | fh = open(abspath) | 100 | fh = open(abspath, "rb") |
1940 | 73 | tar.addfile(tinfo, fileobj=fh) | 101 | tar.addfile(tinfo, fileobj=fh) |
1941 | 74 | fh.close() | 102 | fh.close() |
1942 | 75 | 103 | ||
1943 | @@ -91,7 +119,7 @@ def vcs_dir_contents(path): | |||
1944 | 91 | repo = os.path.dirname(repo) | 119 | repo = os.path.dirname(repo) |
1945 | 92 | if repo == "/": | 120 | if repo == "/": |
1946 | 93 | raise Exception("unsupported or no vcs for %s" % path) | 121 | raise Exception("unsupported or no vcs for %s" % path) |
1948 | 94 | return Utils.cmd_output(ls_files_cmd, cwd=cwd, env=env).split() | 122 | return get_string(Utils.cmd_output(ls_files_cmd, cwd=cwd, env=env)).split('\n') |
1949 | 95 | 123 | ||
1950 | 96 | 124 | ||
1951 | 97 | def dist(appname='', version=''): | 125 | def dist(appname='', version=''): |
1952 | @@ -136,12 +164,14 @@ def dist(appname='', version=''): | |||
1953 | 136 | 164 | ||
1954 | 137 | if not isinstance(appname, str) or not appname: | 165 | if not isinstance(appname, str) or not appname: |
1955 | 138 | # this copes with a mismatch in the calling arguments for dist() | 166 | # this copes with a mismatch in the calling arguments for dist() |
1958 | 139 | appname = Utils.g_module.APPNAME | 167 | appname = Context.g_module.APPNAME |
1959 | 140 | version = Utils.g_module.VERSION | 168 | version = Context.g_module.VERSION |
1960 | 141 | if not version: | 169 | if not version: |
1962 | 142 | version = Utils.g_module.VERSION | 170 | version = Context.g_module.VERSION |
1963 | 143 | 171 | ||
1965 | 144 | srcdir = os.path.normpath(os.path.join(os.path.dirname(Utils.g_module.root_path), Utils.g_module.srcdir)) | 172 | srcdir = os.path.normpath( |
1966 | 173 | os.path.join(os.path.dirname(Context.g_module.root_path), | ||
1967 | 174 | Context.g_module.top)) | ||
1968 | 145 | 175 | ||
1969 | 146 | if not dist_dirs: | 176 | if not dist_dirs: |
1970 | 147 | Logs.error('You must use samba_dist.DIST_DIRS() to set which directories to package') | 177 | Logs.error('You must use samba_dist.DIST_DIRS() to set which directories to package') |
1971 | @@ -218,6 +248,9 @@ def dist(appname='', version=''): | |||
1972 | 218 | else: | 248 | else: |
1973 | 219 | Logs.info('Created %s' % dist_name) | 249 | Logs.info('Created %s' % dist_name) |
1974 | 220 | 250 | ||
1975 | 251 | # TODO use the ctx object instead | ||
1976 | 252 | global dist_archive | ||
1977 | 253 | dist_archive = dist_name | ||
1978 | 221 | return dist_name | 254 | return dist_name |
1979 | 222 | 255 | ||
1980 | 223 | 256 | ||
1981 | diff --git a/buildtools/wafsamba/samba_git.py b/buildtools/wafsamba/samba_git.py | |||
1982 | index c58a579..09a204f 100644 | |||
1983 | --- a/buildtools/wafsamba/samba_git.py | |||
1984 | +++ b/buildtools/wafsamba/samba_git.py | |||
1985 | @@ -4,7 +4,7 @@ import subprocess | |||
1986 | 4 | def find_git(env=None): | 4 | def find_git(env=None): |
1987 | 5 | """Find the git binary.""" | 5 | """Find the git binary.""" |
1988 | 6 | if env is not None and 'GIT' in env: | 6 | if env is not None and 'GIT' in env: |
1990 | 7 | return env['GIT'] | 7 | return env.get_flat('GIT') |
1991 | 8 | 8 | ||
1992 | 9 | # Get version from GIT | 9 | # Get version from GIT |
1993 | 10 | if os.path.exists("/usr/bin/git"): | 10 | if os.path.exists("/usr/bin/git"): |
1994 | diff --git a/buildtools/wafsamba/samba_headers.py b/buildtools/wafsamba/samba_headers.py | |||
1995 | index 0a80082..a268c01 100644 | |||
1996 | --- a/buildtools/wafsamba/samba_headers.py | |||
1997 | +++ b/buildtools/wafsamba/samba_headers.py | |||
1998 | @@ -1,7 +1,7 @@ | |||
1999 | 1 | # specialist handling of header files for Samba | 1 | # specialist handling of header files for Samba |
2000 | 2 | 2 | ||
2001 | 3 | import os, re, sys, fnmatch | 3 | import os, re, sys, fnmatch |
2003 | 4 | import Build, Logs, Utils | 4 | from waflib import Build, Logs, Utils, Errors |
2004 | 5 | from samba_utils import TO_LIST, os_path_relpath | 5 | from samba_utils import TO_LIST, os_path_relpath |
2005 | 6 | 6 | ||
2006 | 7 | 7 | ||
2007 | @@ -99,7 +99,7 @@ def create_public_header(task): | |||
2008 | 99 | os.unlink(tgt) | 99 | os.unlink(tgt) |
2009 | 100 | sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % ( | 100 | sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % ( |
2010 | 101 | os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested)) | 101 | os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested)) |
2012 | 102 | raise Utils.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % ( | 102 | raise Errors.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % ( |
2013 | 103 | hpath, relsrc, task.env.RELPATH)) | 103 | hpath, relsrc, task.env.RELPATH)) |
2014 | 104 | infile.close() | 104 | infile.close() |
2015 | 105 | outfile.close() | 105 | outfile.close() |
2016 | @@ -148,11 +148,12 @@ def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install | |||
2017 | 148 | else: | 148 | else: |
2018 | 149 | h_name = h | 149 | h_name = h |
2019 | 150 | inst_name = os.path.basename(h) | 150 | inst_name = os.path.basename(h) |
2022 | 151 | relpath1 = os_path_relpath(bld.srcnode.abspath(), bld.curdir) | 151 | curdir = bld.path.abspath() |
2023 | 152 | relpath2 = os_path_relpath(bld.curdir, bld.srcnode.abspath()) | 152 | relpath1 = os_path_relpath(bld.srcnode.abspath(), curdir) |
2024 | 153 | relpath2 = os_path_relpath(curdir, bld.srcnode.abspath()) | ||
2025 | 153 | targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path)) | 154 | targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path)) |
2028 | 154 | if not os.path.exists(os.path.join(bld.curdir, targetdir)): | 155 | if not os.path.exists(os.path.join(curdir, targetdir)): |
2029 | 155 | raise Utils.WafError("missing source directory %s for public header %s" % (targetdir, inst_name)) | 156 | raise Errors.WafError("missing source directory %s for public header %s" % (targetdir, inst_name)) |
2030 | 156 | target = os.path.join(targetdir, inst_name) | 157 | target = os.path.join(targetdir, inst_name) |
2031 | 157 | 158 | ||
2032 | 158 | # the source path of the header, relative to the top of the source tree | 159 | # the source path of the header, relative to the top of the source tree |
2033 | diff --git a/buildtools/wafsamba/samba_install.py b/buildtools/wafsamba/samba_install.py | |||
2034 | index 21035bf..47bc0cb 100644 | |||
2035 | --- a/buildtools/wafsamba/samba_install.py | |||
2036 | +++ b/buildtools/wafsamba/samba_install.py | |||
2037 | @@ -4,8 +4,8 @@ | |||
2038 | 4 | # library use | 4 | # library use |
2039 | 5 | 5 | ||
2040 | 6 | import os | 6 | import os |
2043 | 7 | import Utils | 7 | from waflib import Utils, Errors |
2044 | 8 | from TaskGen import feature, before, after | 8 | from waflib.TaskGen import feature, before, after |
2045 | 9 | from samba_utils import LIB_PATH, MODE_755, install_rpath, build_rpath | 9 | from samba_utils import LIB_PATH, MODE_755, install_rpath, build_rpath |
2046 | 10 | 10 | ||
2047 | 11 | @feature('install_bin') | 11 | @feature('install_bin') |
2048 | @@ -45,7 +45,7 @@ def install_binary(self): | |||
2049 | 45 | 45 | ||
2050 | 46 | # tell waf to install the right binary | 46 | # tell waf to install the right binary |
2051 | 47 | bld.install_as(os.path.join(install_path, orig_target), | 47 | bld.install_as(os.path.join(install_path, orig_target), |
2053 | 48 | os.path.join(self.path.abspath(bld.env), self.target), | 48 | self.path.find_or_declare(self.target), |
2054 | 49 | chmod=MODE_755) | 49 | chmod=MODE_755) |
2055 | 50 | 50 | ||
2056 | 51 | 51 | ||
2057 | @@ -143,8 +143,9 @@ def install_library(self): | |||
2058 | 143 | 143 | ||
2059 | 144 | # tell waf to install the library | 144 | # tell waf to install the library |
2060 | 145 | bld.install_as(os.path.join(install_path, install_name), | 145 | bld.install_as(os.path.join(install_path, install_name), |
2062 | 146 | os.path.join(self.path.abspath(bld.env), inst_name), | 146 | self.path.find_or_declare(inst_name), |
2063 | 147 | chmod=MODE_755) | 147 | chmod=MODE_755) |
2064 | 148 | |||
2065 | 148 | if install_link and install_link != install_name: | 149 | if install_link and install_link != install_name: |
2066 | 149 | # and the symlink if needed | 150 | # and the symlink if needed |
2067 | 150 | bld.symlink_as(os.path.join(install_path, install_link), os.path.basename(install_name)) | 151 | bld.symlink_as(os.path.join(install_path, install_link), os.path.basename(install_name)) |
2068 | @@ -227,7 +228,7 @@ def symlink_bin(self): | |||
2069 | 227 | return | 228 | return |
2070 | 228 | 229 | ||
2071 | 229 | if not self.link_task.outputs or not self.link_task.outputs[0]: | 230 | if not self.link_task.outputs or not self.link_task.outputs[0]: |
2073 | 230 | raise Utils.WafError('no outputs found for %s in symlink_bin' % self.name) | 231 | raise Errors.WafError('no outputs found for %s in symlink_bin' % self.name) |
2074 | 231 | binpath = self.link_task.outputs[0].abspath(self.env) | 232 | binpath = self.link_task.outputs[0].abspath(self.env) |
2075 | 232 | bldpath = os.path.join(self.bld.env.BUILD_DIRECTORY, self.link_task.outputs[0].name) | 233 | bldpath = os.path.join(self.bld.env.BUILD_DIRECTORY, self.link_task.outputs[0].name) |
2076 | 233 | 234 | ||
2077 | diff --git a/buildtools/wafsamba/samba_optimisation.py b/buildtools/wafsamba/samba_optimisation.py | |||
2078 | 234 | deleted file mode 100644 | 235 | deleted file mode 100644 |
2079 | index 5008f83..0000000 | |||
2080 | --- a/buildtools/wafsamba/samba_optimisation.py | |||
2081 | +++ /dev/null | |||
2082 | @@ -1,269 +0,0 @@ | |||
2083 | 1 | # This file contains waf optimisations for Samba | ||
2084 | 2 | |||
2085 | 3 | # most of these optimisations are possible because of the restricted build environment | ||
2086 | 4 | # that Samba has. For example, Samba doesn't attempt to cope with Win32 paths during the | ||
2087 | 5 | # build, and Samba doesn't need build varients | ||
2088 | 6 | |||
2089 | 7 | # overall this makes some build tasks quite a bit faster | ||
2090 | 8 | |||
2091 | 9 | import os | ||
2092 | 10 | import Build, Utils, Node | ||
2093 | 11 | from TaskGen import feature, after, before | ||
2094 | 12 | import preproc | ||
2095 | 13 | |||
2096 | 14 | @feature('c', 'cc', 'cxx') | ||
2097 | 15 | @after('apply_type_vars', 'apply_lib_vars', 'apply_core') | ||
2098 | 16 | def apply_incpaths(self): | ||
2099 | 17 | lst = [] | ||
2100 | 18 | |||
2101 | 19 | try: | ||
2102 | 20 | kak = self.bld.kak | ||
2103 | 21 | except AttributeError: | ||
2104 | 22 | kak = self.bld.kak = {} | ||
2105 | 23 | |||
2106 | 24 | # TODO move the uselib processing out of here | ||
2107 | 25 | for lib in self.to_list(self.uselib): | ||
2108 | 26 | for path in self.env['CPPPATH_' + lib]: | ||
2109 | 27 | if not path in lst: | ||
2110 | 28 | lst.append(path) | ||
2111 | 29 | if preproc.go_absolute: | ||
2112 | 30 | for path in preproc.standard_includes: | ||
2113 | 31 | if not path in lst: | ||
2114 | 32 | lst.append(path) | ||
2115 | 33 | |||
2116 | 34 | for path in self.to_list(self.includes): | ||
2117 | 35 | if not path in lst: | ||
2118 | 36 | if preproc.go_absolute or path[0] != '/': # os.path.isabs(path): | ||
2119 | 37 | lst.append(path) | ||
2120 | 38 | else: | ||
2121 | 39 | self.env.prepend_value('CPPPATH', path) | ||
2122 | 40 | |||
2123 | 41 | for path in lst: | ||
2124 | 42 | node = None | ||
2125 | 43 | if path[0] == '/': # os.path.isabs(path): | ||
2126 | 44 | if preproc.go_absolute: | ||
2127 | 45 | node = self.bld.root.find_dir(path) | ||
2128 | 46 | elif path[0] == '#': | ||
2129 | 47 | node = self.bld.srcnode | ||
2130 | 48 | if len(path) > 1: | ||
2131 | 49 | try: | ||
2132 | 50 | node = kak[path] | ||
2133 | 51 | except KeyError: | ||
2134 | 52 | kak[path] = node = node.find_dir(path[1:]) | ||
2135 | 53 | else: | ||
2136 | 54 | try: | ||
2137 | 55 | node = kak[(self.path.id, path)] | ||
2138 | 56 | except KeyError: | ||
2139 | 57 | kak[(self.path.id, path)] = node = self.path.find_dir(path) | ||
2140 | 58 | |||
2141 | 59 | if node: | ||
2142 | 60 | self.env.append_value('INC_PATHS', node) | ||
2143 | 61 | |||
2144 | 62 | @feature('c', 'cc') | ||
2145 | 63 | @after('apply_incpaths') | ||
2146 | 64 | def apply_obj_vars_cc(self): | ||
2147 | 65 | """after apply_incpaths for INC_PATHS""" | ||
2148 | 66 | env = self.env | ||
2149 | 67 | app = env.append_unique | ||
2150 | 68 | cpppath_st = env['CPPPATH_ST'] | ||
2151 | 69 | |||
2152 | 70 | lss = env['_CCINCFLAGS'] | ||
2153 | 71 | |||
2154 | 72 | try: | ||
2155 | 73 | cac = self.bld.cac | ||
2156 | 74 | except AttributeError: | ||
2157 | 75 | cac = self.bld.cac = {} | ||
2158 | 76 | |||
2159 | 77 | # local flags come first | ||
2160 | 78 | # set the user-defined includes paths | ||
2161 | 79 | for i in env['INC_PATHS']: | ||
2162 | 80 | |||
2163 | 81 | try: | ||
2164 | 82 | lss.extend(cac[i.id]) | ||
2165 | 83 | except KeyError: | ||
2166 | 84 | |||
2167 | 85 | cac[i.id] = [cpppath_st % i.bldpath(env), cpppath_st % i.srcpath(env)] | ||
2168 | 86 | lss.extend(cac[i.id]) | ||
2169 | 87 | |||
2170 | 88 | env['_CCINCFLAGS'] = lss | ||
2171 | 89 | # set the library include paths | ||
2172 | 90 | for i in env['CPPPATH']: | ||
2173 | 91 | app('_CCINCFLAGS', cpppath_st % i) | ||
2174 | 92 | |||
2175 | 93 | import Node, Environment | ||
2176 | 94 | |||
2177 | 95 | def vari(self): | ||
2178 | 96 | return "default" | ||
2179 | 97 | Environment.Environment.variant = vari | ||
2180 | 98 | |||
2181 | 99 | def variant(self, env): | ||
2182 | 100 | if not env: return 0 | ||
2183 | 101 | elif self.id & 3 == Node.FILE: return 0 | ||
2184 | 102 | else: return "default" | ||
2185 | 103 | Node.Node.variant = variant | ||
2186 | 104 | |||
2187 | 105 | |||
2188 | 106 | import TaskGen, Task | ||
2189 | 107 | |||
2190 | 108 | def create_task(self, name, src=None, tgt=None): | ||
2191 | 109 | task = Task.TaskBase.classes[name](self.env, generator=self) | ||
2192 | 110 | if src: | ||
2193 | 111 | task.set_inputs(src) | ||
2194 | 112 | if tgt: | ||
2195 | 113 | task.set_outputs(tgt) | ||
2196 | 114 | return task | ||
2197 | 115 | TaskGen.task_gen.create_task = create_task | ||
2198 | 116 | |||
2199 | 117 | def hash_constraints(self): | ||
2200 | 118 | a = self.attr | ||
2201 | 119 | sum = hash((str(a('before', '')), | ||
2202 | 120 | str(a('after', '')), | ||
2203 | 121 | str(a('ext_in', '')), | ||
2204 | 122 | str(a('ext_out', '')), | ||
2205 | 123 | self.__class__.maxjobs)) | ||
2206 | 124 | return sum | ||
2207 | 125 | Task.TaskBase.hash_constraints = hash_constraints | ||
2208 | 126 | |||
2209 | 127 | def hash_env_vars(self, env, vars_lst): | ||
2210 | 128 | idx = str(id(env)) + str(vars_lst) | ||
2211 | 129 | try: | ||
2212 | 130 | return self.cache_sig_vars[idx] | ||
2213 | 131 | except KeyError: | ||
2214 | 132 | pass | ||
2215 | 133 | |||
2216 | 134 | m = Utils.md5() | ||
2217 | 135 | m.update(''.join([str(env[a]) for a in vars_lst])) | ||
2218 | 136 | |||
2219 | 137 | ret = self.cache_sig_vars[idx] = m.digest() | ||
2220 | 138 | return ret | ||
2221 | 139 | Build.BuildContext.hash_env_vars = hash_env_vars | ||
2222 | 140 | |||
2223 | 141 | |||
2224 | 142 | def store_fast(self, filename): | ||
2225 | 143 | file = open(filename, 'wb') | ||
2226 | 144 | data = self.get_merged_dict() | ||
2227 | 145 | try: | ||
2228 | 146 | Build.cPickle.dump(data, file, -1) | ||
2229 | 147 | finally: | ||
2230 | 148 | file.close() | ||
2231 | 149 | Environment.Environment.store_fast = store_fast | ||
2232 | 150 | |||
2233 | 151 | def load_fast(self, filename): | ||
2234 | 152 | file = open(filename, 'rb') | ||
2235 | 153 | try: | ||
2236 | 154 | data = Build.cPickle.load(file) | ||
2237 | 155 | finally: | ||
2238 | 156 | file.close() | ||
2239 | 157 | self.table.update(data) | ||
2240 | 158 | Environment.Environment.load_fast = load_fast | ||
2241 | 159 | |||
2242 | 160 | def is_this_a_static_lib(self, name): | ||
2243 | 161 | try: | ||
2244 | 162 | cache = self.cache_is_this_a_static_lib | ||
2245 | 163 | except AttributeError: | ||
2246 | 164 | cache = self.cache_is_this_a_static_lib = {} | ||
2247 | 165 | try: | ||
2248 | 166 | return cache[name] | ||
2249 | 167 | except KeyError: | ||
2250 | 168 | ret = cache[name] = 'cstaticlib' in self.bld.get_tgen_by_name(name).features | ||
2251 | 169 | return ret | ||
2252 | 170 | TaskGen.task_gen.is_this_a_static_lib = is_this_a_static_lib | ||
2253 | 171 | |||
2254 | 172 | def shared_ancestors(self): | ||
2255 | 173 | try: | ||
2256 | 174 | cache = self.cache_is_this_a_static_lib | ||
2257 | 175 | except AttributeError: | ||
2258 | 176 | cache = self.cache_is_this_a_static_lib = {} | ||
2259 | 177 | try: | ||
2260 | 178 | return cache[id(self)] | ||
2261 | 179 | except KeyError: | ||
2262 | 180 | |||
2263 | 181 | ret = [] | ||
2264 | 182 | if 'cshlib' in self.features: # or 'cprogram' in self.features: | ||
2265 | 183 | if getattr(self, 'uselib_local', None): | ||
2266 | 184 | lst = self.to_list(self.uselib_local) | ||
2267 | 185 | ret = [x for x in lst if not self.is_this_a_static_lib(x)] | ||
2268 | 186 | cache[id(self)] = ret | ||
2269 | 187 | return ret | ||
2270 | 188 | TaskGen.task_gen.shared_ancestors = shared_ancestors | ||
2271 | 189 | |||
2272 | 190 | @feature('c', 'cc', 'cxx') | ||
2273 | 191 | @after('apply_link', 'init_cc', 'init_cxx', 'apply_core') | ||
2274 | 192 | def apply_lib_vars(self): | ||
2275 | 193 | """after apply_link because of 'link_task' | ||
2276 | 194 | after default_cc because of the attribute 'uselib'""" | ||
2277 | 195 | |||
2278 | 196 | # after 'apply_core' in case if 'cc' if there is no link | ||
2279 | 197 | |||
2280 | 198 | env = self.env | ||
2281 | 199 | app = env.append_value | ||
2282 | 200 | seen_libpaths = set([]) | ||
2283 | 201 | |||
2284 | 202 | # OPTIMIZATION 1: skip uselib variables already added (700ms) | ||
2285 | 203 | seen_uselib = set([]) | ||
2286 | 204 | |||
2287 | 205 | # 1. the case of the libs defined in the project (visit ancestors first) | ||
2288 | 206 | # the ancestors external libraries (uselib) will be prepended | ||
2289 | 207 | self.uselib = self.to_list(self.uselib) | ||
2290 | 208 | names = self.to_list(self.uselib_local) | ||
2291 | 209 | |||
2292 | 210 | seen = set([]) | ||
2293 | 211 | tmp = Utils.deque(names) # consume a copy of the list of names | ||
2294 | 212 | while tmp: | ||
2295 | 213 | lib_name = tmp.popleft() | ||
2296 | 214 | # visit dependencies only once | ||
2297 | 215 | if lib_name in seen: | ||
2298 | 216 | continue | ||
2299 | 217 | |||
2300 | 218 | y = self.get_tgen_by_name(lib_name) | ||
2301 | 219 | if not y: | ||
2302 | 220 | raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name)) | ||
2303 | 221 | y.post() | ||
2304 | 222 | seen.add(lib_name) | ||
2305 | 223 | |||
2306 | 224 | # OPTIMIZATION 2: pre-compute ancestors shared libraries (100ms) | ||
2307 | 225 | tmp.extend(y.shared_ancestors()) | ||
2308 | 226 | |||
2309 | 227 | # link task and flags | ||
2310 | 228 | if getattr(y, 'link_task', None): | ||
2311 | 229 | |||
2312 | 230 | link_name = y.target[y.target.rfind('/') + 1:] | ||
2313 | 231 | if 'cstaticlib' in y.features: | ||
2314 | 232 | app('STATICLIB', link_name) | ||
2315 | 233 | elif 'cshlib' in y.features or 'cprogram' in y.features: | ||
2316 | 234 | # WARNING some linkers can link against programs | ||
2317 | 235 | app('LIB', link_name) | ||
2318 | 236 | |||
2319 | 237 | # the order | ||
2320 | 238 | self.link_task.set_run_after(y.link_task) | ||
2321 | 239 | |||
2322 | 240 | # for the recompilation | ||
2323 | 241 | dep_nodes = getattr(self.link_task, 'dep_nodes', []) | ||
2324 | 242 | self.link_task.dep_nodes = dep_nodes + y.link_task.outputs | ||
2325 | 243 | |||
2326 | 244 | # OPTIMIZATION 3: reduce the amount of function calls | ||
2327 | 245 | # add the link path too | ||
2328 | 246 | par = y.link_task.outputs[0].parent | ||
2329 | 247 | if id(par) not in seen_libpaths: | ||
2330 | 248 | seen_libpaths.add(id(par)) | ||
2331 | 249 | tmp_path = par.bldpath(self.env) | ||
2332 | 250 | if not tmp_path in env['LIBPATH']: | ||
2333 | 251 | env.prepend_value('LIBPATH', tmp_path) | ||
2334 | 252 | |||
2335 | 253 | |||
2336 | 254 | # add ancestors uselib too - but only propagate those that have no staticlib | ||
2337 | 255 | for v in self.to_list(y.uselib): | ||
2338 | 256 | if v not in seen_uselib: | ||
2339 | 257 | seen_uselib.add(v) | ||
2340 | 258 | if not env['STATICLIB_' + v]: | ||
2341 | 259 | if not v in self.uselib: | ||
2342 | 260 | self.uselib.insert(0, v) | ||
2343 | 261 | |||
2344 | 262 | # 2. the case of the libs defined outside | ||
2345 | 263 | for x in self.uselib: | ||
2346 | 264 | for v in self.p_flag_vars: | ||
2347 | 265 | val = self.env[v + '_' + x] | ||
2348 | 266 | if val: | ||
2349 | 267 | self.env.append_value(v, val) | ||
2350 | 268 | |||
2351 | 269 | |||
2352 | diff --git a/buildtools/wafsamba/samba_patterns.py b/buildtools/wafsamba/samba_patterns.py | |||
2353 | index 2b93937..d0fe965 100644 | |||
2354 | --- a/buildtools/wafsamba/samba_patterns.py | |||
2355 | +++ b/buildtools/wafsamba/samba_patterns.py | |||
2356 | @@ -1,6 +1,7 @@ | |||
2357 | 1 | # a waf tool to add extension based build patterns for Samba | 1 | # a waf tool to add extension based build patterns for Samba |
2358 | 2 | 2 | ||
2360 | 3 | import Build | 3 | import sys |
2361 | 4 | from waflib import Build | ||
2362 | 4 | from wafsamba import samba_version_file | 5 | from wafsamba import samba_version_file |
2363 | 5 | 6 | ||
2364 | 6 | def write_version_header(task): | 7 | def write_version_header(task): |
2365 | @@ -146,13 +147,19 @@ def write_build_options_section(fp, keys, section): | |||
2366 | 146 | fp.write("\n") | 147 | fp.write("\n") |
2367 | 147 | 148 | ||
2368 | 148 | def write_build_options(task): | 149 | def write_build_options(task): |
2370 | 149 | tbl = task.env['defines'] | 150 | tbl = task.env |
2371 | 150 | keys_option_with = [] | 151 | keys_option_with = [] |
2372 | 151 | keys_option_utmp = [] | 152 | keys_option_utmp = [] |
2373 | 152 | keys_option_have = [] | 153 | keys_option_have = [] |
2374 | 153 | keys_header_sys = [] | 154 | keys_header_sys = [] |
2375 | 154 | keys_header_other = [] | 155 | keys_header_other = [] |
2376 | 155 | keys_misc = [] | 156 | keys_misc = [] |
2377 | 157 | if sys.hexversion>0x300000f: | ||
2378 | 158 | trans_table = bytes.maketrans(b'.-()', b'____') | ||
2379 | 159 | else: | ||
2380 | 160 | import string | ||
2381 | 161 | trans_table = string.maketrans('.-()', '____') | ||
2382 | 162 | |||
2383 | 156 | for key in tbl: | 163 | for key in tbl: |
2384 | 157 | if key.startswith("HAVE_UT_UT_") or key.find("UTMP") >= 0: | 164 | if key.startswith("HAVE_UT_UT_") or key.find("UTMP") >= 0: |
2385 | 158 | keys_option_utmp.append(key) | 165 | keys_option_utmp.append(key) |
2386 | @@ -169,7 +176,7 @@ def write_build_options(task): | |||
2387 | 169 | l = key.split("(") | 176 | l = key.split("(") |
2388 | 170 | keys_misc.append(l[0]) | 177 | keys_misc.append(l[0]) |
2389 | 171 | else: | 178 | else: |
2391 | 172 | keys_misc.append(key) | 179 | keys_misc.append(key.translate(trans_table)) |
2392 | 173 | 180 | ||
2393 | 174 | tgt = task.outputs[0].bldpath(task.env) | 181 | tgt = task.outputs[0].bldpath(task.env) |
2394 | 175 | f = open(tgt, 'w') | 182 | f = open(tgt, 'w') |
2395 | diff --git a/buildtools/wafsamba/samba_perl.py b/buildtools/wafsamba/samba_perl.py | |||
2396 | index 2e9a53a..e019acb 100644 | |||
2397 | --- a/buildtools/wafsamba/samba_perl.py | |||
2398 | +++ b/buildtools/wafsamba/samba_perl.py | |||
2399 | @@ -1,6 +1,6 @@ | |||
2403 | 1 | import Utils | 1 | from waflib import Utils |
2404 | 2 | from Configure import conf | 2 | from waflib.Configure import conf |
2405 | 3 | 3 | from samba_utils import get_string | |
2406 | 4 | done = {} | 4 | done = {} |
2407 | 5 | 5 | ||
2408 | 6 | @conf | 6 | @conf |
2409 | @@ -9,13 +9,16 @@ def SAMBA_CHECK_PERL(conf, mandatory=True, version=(5,0,0)): | |||
2410 | 9 | return | 9 | return |
2411 | 10 | done["done"] = True | 10 | done["done"] = True |
2412 | 11 | conf.find_program('perl', var='PERL', mandatory=mandatory) | 11 | conf.find_program('perl', var='PERL', mandatory=mandatory) |
2414 | 12 | conf.check_tool('perl') | 12 | conf.load('perl') |
2415 | 13 | path_perl = conf.find_program('perl') | 13 | path_perl = conf.find_program('perl') |
2416 | 14 | conf.env.PERL_SPECIFIED = (conf.env.PERL != path_perl) | 14 | conf.env.PERL_SPECIFIED = (conf.env.PERL != path_perl) |
2417 | 15 | conf.check_perl_version(version) | 15 | conf.check_perl_version(version) |
2418 | 16 | 16 | ||
2419 | 17 | def read_perl_config_var(cmd): | 17 | def read_perl_config_var(cmd): |
2421 | 18 | return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd])) | 18 | output = Utils.cmd_output([conf.env.get_flat('PERL'), '-MConfig', '-e', cmd]) |
2422 | 19 | if not isinstance(output, str): | ||
2423 | 20 | output = get_string(output) | ||
2424 | 21 | return Utils.to_list(output) | ||
2425 | 19 | 22 | ||
2426 | 20 | def check_perl_config_var(var): | 23 | def check_perl_config_var(var): |
2427 | 21 | conf.start_msg("Checking for perl $Config{%s}:" % var) | 24 | conf.start_msg("Checking for perl $Config{%s}:" % var) |
2428 | diff --git a/buildtools/wafsamba/samba_pidl.py b/buildtools/wafsamba/samba_pidl.py | |||
2429 | index 9651e4d..3fecfa9 100644 | |||
2430 | --- a/buildtools/wafsamba/samba_pidl.py | |||
2431 | +++ b/buildtools/wafsamba/samba_pidl.py | |||
2432 | @@ -1,8 +1,8 @@ | |||
2433 | 1 | # waf build tool for building IDL files with pidl | 1 | # waf build tool for building IDL files with pidl |
2434 | 2 | 2 | ||
2435 | 3 | import os | 3 | import os |
2438 | 4 | import Build | 4 | from waflib import Build, Utils |
2439 | 5 | from TaskGen import feature, before | 5 | from waflib.TaskGen import feature, before |
2440 | 6 | from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE | 6 | from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE |
2441 | 7 | 7 | ||
2442 | 8 | def SAMBA_PIDL(bld, pname, source, | 8 | def SAMBA_PIDL(bld, pname, source, |
2443 | @@ -76,9 +76,9 @@ def SAMBA_PIDL(bld, pname, source, | |||
2444 | 76 | else: | 76 | else: |
2445 | 77 | cc = 'CC="%s"' % bld.CONFIG_GET("CC") | 77 | cc = 'CC="%s"' % bld.CONFIG_GET("CC") |
2446 | 78 | 78 | ||
2448 | 79 | t = bld(rule='cd .. && %s %s ${PERL} "${PIDL}" --quiet ${OPTIONS} --outputdir ${OUTPUTDIR} -- "${SRC[0].abspath(env)}"' % (cpp, cc), | 79 | t = bld(rule='cd ${PIDL_LAUNCH_DIR} && %s %s ${PERL} ${PIDL} --quiet ${OPTIONS} --outputdir ${OUTPUTDIR} -- "${IDLSRC}"' % (cpp, cc), |
2449 | 80 | ext_out = '.c', | 80 | ext_out = '.c', |
2451 | 81 | before = 'cc', | 81 | before = 'c', |
2452 | 82 | update_outputs = True, | 82 | update_outputs = True, |
2453 | 83 | shell = True, | 83 | shell = True, |
2454 | 84 | source = source, | 84 | source = source, |
2455 | @@ -86,18 +86,22 @@ def SAMBA_PIDL(bld, pname, source, | |||
2456 | 86 | name = name, | 86 | name = name, |
2457 | 87 | samba_type = 'PIDL') | 87 | samba_type = 'PIDL') |
2458 | 88 | 88 | ||
2459 | 89 | # prime the list of nodes we are dependent on with the cached pidl sources | ||
2460 | 90 | t.allnodes = pidl_src_nodes | ||
2461 | 91 | 89 | ||
2463 | 92 | t.env.PIDL = os.path.join(bld.srcnode.abspath(), 'pidl/pidl') | 90 | t.env.PIDL_LAUNCH_DIR = bld.srcnode.path_from(bld.bldnode) |
2464 | 91 | pnode = bld.srcnode.find_resource('pidl/pidl') | ||
2465 | 92 | t.env.PIDL = pnode.path_from(bld.srcnode) | ||
2466 | 93 | t.env.OPTIONS = TO_LIST(options) | 93 | t.env.OPTIONS = TO_LIST(options) |
2468 | 94 | t.env.OUTPUTDIR = bld.bldnode.name + '/' + bld.path.find_dir(output_dir).bldpath(t.env) | 94 | snode = t.path.find_resource(source[0]) |
2469 | 95 | t.env.IDLSRC = snode.path_from(bld.srcnode) | ||
2470 | 96 | t.env.OUTPUTDIR = bld.bldnode.path_from(bld.srcnode) + '/' + bld.path.find_dir(output_dir).path_from(bld.srcnode) | ||
2471 | 97 | |||
2472 | 98 | bld.add_manual_dependency(snode, pidl_src_nodes) | ||
2473 | 95 | 99 | ||
2474 | 96 | if generate_tables and table_header_idx is not None: | 100 | if generate_tables and table_header_idx is not None: |
2475 | 97 | pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS') | 101 | pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS') |
2476 | 98 | pidl_headers[name] = [bld.path.find_or_declare(out_files[table_header_idx])] | 102 | pidl_headers[name] = [bld.path.find_or_declare(out_files[table_header_idx])] |
2477 | 99 | 103 | ||
2479 | 100 | t.more_includes = '#' + bld.path.relpath_gen(bld.srcnode) | 104 | t.more_includes = '#' + bld.path.path_from(bld.srcnode) |
2480 | 101 | Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL | 105 | Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL |
2481 | 102 | 106 | ||
2482 | 103 | 107 | ||
2483 | @@ -117,13 +121,15 @@ Build.BuildContext.SAMBA_PIDL_LIST = SAMBA_PIDL_LIST | |||
2484 | 117 | @before('exec_rule') | 121 | @before('exec_rule') |
2485 | 118 | def collect(self): | 122 | def collect(self): |
2486 | 119 | pidl_headers = LOCAL_CACHE(self.bld, 'PIDL_HEADERS') | 123 | pidl_headers = LOCAL_CACHE(self.bld, 'PIDL_HEADERS') |
2487 | 124 | # The first source is tables.pl itself | ||
2488 | 125 | self.source = Utils.to_list(self.source) | ||
2489 | 120 | for (name, hd) in pidl_headers.items(): | 126 | for (name, hd) in pidl_headers.items(): |
2490 | 121 | y = self.bld.get_tgen_by_name(name) | 127 | y = self.bld.get_tgen_by_name(name) |
2491 | 122 | self.bld.ASSERT(y is not None, 'Failed to find PIDL header %s' % name) | 128 | self.bld.ASSERT(y is not None, 'Failed to find PIDL header %s' % name) |
2492 | 123 | y.post() | 129 | y.post() |
2493 | 124 | for node in hd: | 130 | for node in hd: |
2494 | 125 | self.bld.ASSERT(node is not None, 'Got None as build node generating PIDL table for %s' % name) | 131 | self.bld.ASSERT(node is not None, 'Got None as build node generating PIDL table for %s' % name) |
2496 | 126 | self.source += " " + node.relpath_gen(self.path) | 132 | self.source.append(node) |
2497 | 127 | 133 | ||
2498 | 128 | 134 | ||
2499 | 129 | def SAMBA_PIDL_TABLES(bld, name, target): | 135 | def SAMBA_PIDL_TABLES(bld, name, target): |
2500 | @@ -131,9 +137,9 @@ def SAMBA_PIDL_TABLES(bld, name, target): | |||
2501 | 131 | bld.SET_BUILD_GROUP('main') | 137 | bld.SET_BUILD_GROUP('main') |
2502 | 132 | t = bld( | 138 | t = bld( |
2503 | 133 | features = 'collect', | 139 | features = 'collect', |
2505 | 134 | rule = '${PERL} ${SRC} --output ${TGT} | sed "s|default/||" > ${TGT}', | 140 | rule = '${PERL} ${SRC} > ${TGT}', |
2506 | 135 | ext_out = '.c', | 141 | ext_out = '.c', |
2508 | 136 | before = 'cc', | 142 | before = 'c', |
2509 | 137 | update_outputs = True, | 143 | update_outputs = True, |
2510 | 138 | shell = True, | 144 | shell = True, |
2511 | 139 | source = '../../librpc/tables.pl', | 145 | source = '../../librpc/tables.pl', |
2512 | diff --git a/buildtools/wafsamba/samba_python.py b/buildtools/wafsamba/samba_python.py | |||
2513 | index cb99fe9..fac0e34 100644 | |||
2514 | --- a/buildtools/wafsamba/samba_python.py | |||
2515 | +++ b/buildtools/wafsamba/samba_python.py | |||
2516 | @@ -1,11 +1,11 @@ | |||
2517 | 1 | # waf build tool for building IDL files with pidl | 1 | # waf build tool for building IDL files with pidl |
2518 | 2 | 2 | ||
2522 | 3 | import os | 3 | import os, sys |
2523 | 4 | import Build, Logs, Utils, Configure | 4 | from waflib import Build, Logs, Utils, Configure, Errors |
2524 | 5 | from Configure import conf | 5 | from waflib.Configure import conf |
2525 | 6 | 6 | ||
2526 | 7 | @conf | 7 | @conf |
2528 | 8 | def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)): | 8 | def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,6,0)): |
2529 | 9 | # enable tool to build python extensions | 9 | # enable tool to build python extensions |
2530 | 10 | if conf.env.HAVE_PYTHON_H: | 10 | if conf.env.HAVE_PYTHON_H: |
2531 | 11 | conf.check_python_version(version) | 11 | conf.check_python_version(version) |
2532 | @@ -14,23 +14,25 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)): | |||
2533 | 14 | interpreters = [] | 14 | interpreters = [] |
2534 | 15 | 15 | ||
2535 | 16 | if conf.env['EXTRA_PYTHON']: | 16 | if conf.env['EXTRA_PYTHON']: |
2537 | 17 | conf.all_envs['extrapython'] = conf.env.copy() | 17 | conf.all_envs['extrapython'] = conf.env.derive() |
2538 | 18 | conf.setenv('extrapython') | 18 | conf.setenv('extrapython') |
2539 | 19 | conf.env['PYTHON'] = conf.env['EXTRA_PYTHON'] | 19 | conf.env['PYTHON'] = conf.env['EXTRA_PYTHON'] |
2540 | 20 | conf.env['IS_EXTRA_PYTHON'] = 'yes' | 20 | conf.env['IS_EXTRA_PYTHON'] = 'yes' |
2541 | 21 | conf.find_program('python', var='PYTHON', mandatory=True) | 21 | conf.find_program('python', var='PYTHON', mandatory=True) |
2543 | 22 | conf.check_tool('python') | 22 | conf.load('python') |
2544 | 23 | try: | 23 | try: |
2546 | 24 | conf.check_python_version((3, 3, 0)) | 24 | conf.check_python_version(version) |
2547 | 25 | except Exception: | 25 | except Exception: |
2549 | 26 | Logs.warn('extra-python needs to be Python 3.3 or later') | 26 | Logs.warn('extra-python needs to be Python %s.%s.%s or later' % |
2550 | 27 | (version[0], version[1], version[2])) | ||
2551 | 27 | raise | 28 | raise |
2552 | 28 | interpreters.append(conf.env['PYTHON']) | 29 | interpreters.append(conf.env['PYTHON']) |
2553 | 29 | conf.setenv('default') | 30 | conf.setenv('default') |
2554 | 30 | 31 | ||
2558 | 31 | conf.find_program('python', var='PYTHON', mandatory=mandatory) | 32 | conf.find_program('python3', var='PYTHON', mandatory=mandatory) |
2559 | 32 | conf.check_tool('python') | 33 | conf.load('python') |
2560 | 33 | path_python = conf.find_program('python') | 34 | path_python = conf.find_program('python3') |
2561 | 35 | |||
2562 | 34 | conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python) | 36 | conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python) |
2563 | 35 | conf.check_python_version(version) | 37 | conf.check_python_version(version) |
2564 | 36 | 38 | ||
2565 | @@ -42,14 +44,16 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)): | |||
2566 | 42 | def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True): | 44 | def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True): |
2567 | 43 | if conf.env.disable_python: | 45 | if conf.env.disable_python: |
2568 | 44 | if mandatory: | 46 | if mandatory: |
2570 | 45 | raise Utils.WafError("Cannot check for python headers when " | 47 | raise Errors.WafError("Cannot check for python headers when " |
2571 | 46 | "--disable-python specified") | 48 | "--disable-python specified") |
2572 | 47 | 49 | ||
2573 | 48 | conf.msg("python headers", "Check disabled due to --disable-python") | 50 | conf.msg("python headers", "Check disabled due to --disable-python") |
2574 | 49 | # we don't want PYTHONDIR in config.h, as otherwise changing | 51 | # we don't want PYTHONDIR in config.h, as otherwise changing |
2575 | 50 | # --prefix causes a complete rebuild | 52 | # --prefix causes a complete rebuild |
2578 | 51 | del(conf.env.defines['PYTHONDIR']) | 53 | conf.env.DEFINES = [x for x in conf.env.DEFINES |
2579 | 52 | del(conf.env.defines['PYTHONARCHDIR']) | 54 | if not x.startswith('PYTHONDIR=') |
2580 | 55 | and not x.startswith('PYTHONARCHDIR=')] | ||
2581 | 56 | |||
2582 | 53 | return | 57 | return |
2583 | 54 | 58 | ||
2584 | 55 | if conf.env["python_headers_checked"] == []: | 59 | if conf.env["python_headers_checked"] == []: |
2585 | @@ -64,21 +68,22 @@ def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True): | |||
2586 | 64 | if conf.env['EXTRA_PYTHON']: | 68 | if conf.env['EXTRA_PYTHON']: |
2587 | 65 | extraversion = conf.all_envs['extrapython']['PYTHON_VERSION'] | 69 | extraversion = conf.all_envs['extrapython']['PYTHON_VERSION'] |
2588 | 66 | if extraversion == conf.env['PYTHON_VERSION']: | 70 | if extraversion == conf.env['PYTHON_VERSION']: |
2590 | 67 | raise Utils.WafError("extrapython %s is same as main python %s" % ( | 71 | raise Errors.WafError("extrapython %s is same as main python %s" % ( |
2591 | 68 | extraversion, conf.env['PYTHON_VERSION'])) | 72 | extraversion, conf.env['PYTHON_VERSION'])) |
2592 | 69 | else: | 73 | else: |
2593 | 70 | conf.msg("python headers", "using cache") | 74 | conf.msg("python headers", "using cache") |
2594 | 71 | 75 | ||
2595 | 72 | # we don't want PYTHONDIR in config.h, as otherwise changing | 76 | # we don't want PYTHONDIR in config.h, as otherwise changing |
2596 | 73 | # --prefix causes a complete rebuild | 77 | # --prefix causes a complete rebuild |
2599 | 74 | del(conf.env.defines['PYTHONDIR']) | 78 | conf.env.DEFINES = [x for x in conf.env.DEFINES |
2600 | 75 | del(conf.env.defines['PYTHONARCHDIR']) | 79 | if not x.startswith('PYTHONDIR=') |
2601 | 80 | and not x.startswith('PYTHONARCHDIR=')] | ||
2602 | 76 | 81 | ||
2603 | 77 | def _check_python_headers(conf, mandatory): | 82 | def _check_python_headers(conf, mandatory): |
2604 | 78 | try: | 83 | try: |
2608 | 79 | Configure.ConfigurationError | 84 | conf.errors.ConfigurationError |
2609 | 80 | conf.check_python_headers(mandatory=mandatory) | 85 | conf.check_python_headers() |
2610 | 81 | except Configure.ConfigurationError: | 86 | except conf.errors.ConfigurationError: |
2611 | 82 | if mandatory: | 87 | if mandatory: |
2612 | 83 | raise | 88 | raise |
2613 | 84 | 89 | ||
2614 | @@ -95,6 +100,11 @@ def _check_python_headers(conf, mandatory): | |||
2615 | 95 | conf.env.append_unique('LIBPATH_PYEMBED', lib[2:]) # strip '-L' | 100 | conf.env.append_unique('LIBPATH_PYEMBED', lib[2:]) # strip '-L' |
2616 | 96 | conf.env['LINKFLAGS_PYEMBED'].remove(lib) | 101 | conf.env['LINKFLAGS_PYEMBED'].remove(lib) |
2617 | 97 | 102 | ||
2618 | 103 | # same as in waf 1.5, keep only '-fno-strict-aliasing' | ||
2619 | 104 | # and ignore defines such as NDEBUG _FORTIFY_SOURCE=2 | ||
2620 | 105 | conf.env.DEFINES_PYEXT = [] | ||
2621 | 106 | conf.env.CFLAGS_PYEXT = ['-fno-strict-aliasing'] | ||
2622 | 107 | |||
2623 | 98 | return | 108 | return |
2624 | 99 | 109 | ||
2625 | 100 | def PYTHON_BUILD_IS_ENABLED(self): | 110 | def PYTHON_BUILD_IS_ENABLED(self): |
2626 | @@ -145,7 +155,7 @@ def SAMBA_PYTHON(bld, name, | |||
2627 | 145 | source = bld.EXPAND_VARIABLES(source, vars=vars) | 155 | source = bld.EXPAND_VARIABLES(source, vars=vars) |
2628 | 146 | 156 | ||
2629 | 147 | if realname is not None: | 157 | if realname is not None: |
2631 | 148 | link_name = 'python_modules/%s' % realname | 158 | link_name = 'python/%s' % realname |
2632 | 149 | else: | 159 | else: |
2633 | 150 | link_name = None | 160 | link_name = None |
2634 | 151 | 161 | ||
2635 | diff --git a/buildtools/wafsamba/samba_third_party.py b/buildtools/wafsamba/samba_third_party.py | |||
2636 | index 1144f81..e0dd3e1 100644 | |||
2637 | --- a/buildtools/wafsamba/samba_third_party.py | |||
2638 | +++ b/buildtools/wafsamba/samba_third_party.py | |||
2639 | @@ -1,12 +1,12 @@ | |||
2640 | 1 | # functions to support third party libraries | 1 | # functions to support third party libraries |
2641 | 2 | 2 | ||
2642 | 3 | import os | 3 | import os |
2645 | 4 | import Utils, Build | 4 | from waflib import Utils, Build, Context |
2646 | 5 | from Configure import conf | 5 | from waflib.Configure import conf |
2647 | 6 | 6 | ||
2648 | 7 | @conf | 7 | @conf |
2649 | 8 | def CHECK_FOR_THIRD_PARTY(conf): | 8 | def CHECK_FOR_THIRD_PARTY(conf): |
2651 | 9 | return os.path.exists(os.path.join(Utils.g_module.srcdir, 'third_party')) | 9 | return os.path.exists(os.path.join(Context.g_module.top, 'third_party')) |
2652 | 10 | 10 | ||
2653 | 11 | Build.BuildContext.CHECK_FOR_THIRD_PARTY = CHECK_FOR_THIRD_PARTY | 11 | Build.BuildContext.CHECK_FOR_THIRD_PARTY = CHECK_FOR_THIRD_PARTY |
2654 | 12 | 12 | ||
2655 | @@ -36,18 +36,18 @@ Build.BuildContext.CHECK_POPT = CHECK_POPT | |||
2656 | 36 | 36 | ||
2657 | 37 | @conf | 37 | @conf |
2658 | 38 | def CHECK_CMOCKA(conf): | 38 | def CHECK_CMOCKA(conf): |
2660 | 39 | return conf.CHECK_BUNDLED_SYSTEM_PKG('cmocka', minversion='1.1.1') | 39 | return conf.CHECK_BUNDLED_SYSTEM_PKG('cmocka', minversion='1.1.3') |
2661 | 40 | 40 | ||
2662 | 41 | Build.BuildContext.CHECK_CMOCKA = CHECK_CMOCKA | 41 | Build.BuildContext.CHECK_CMOCKA = CHECK_CMOCKA |
2663 | 42 | 42 | ||
2664 | 43 | @conf | 43 | @conf |
2665 | 44 | def CHECK_SOCKET_WRAPPER(conf): | 44 | def CHECK_SOCKET_WRAPPER(conf): |
2667 | 45 | return conf.CHECK_BUNDLED_SYSTEM_PKG('socket_wrapper', minversion='1.1.9') | 45 | return conf.CHECK_BUNDLED_SYSTEM_PKG('socket_wrapper', minversion='1.2.1') |
2668 | 46 | Build.BuildContext.CHECK_SOCKET_WRAPPER = CHECK_SOCKET_WRAPPER | 46 | Build.BuildContext.CHECK_SOCKET_WRAPPER = CHECK_SOCKET_WRAPPER |
2669 | 47 | 47 | ||
2670 | 48 | @conf | 48 | @conf |
2671 | 49 | def CHECK_NSS_WRAPPER(conf): | 49 | def CHECK_NSS_WRAPPER(conf): |
2673 | 50 | return conf.CHECK_BUNDLED_SYSTEM_PKG('nss_wrapper', minversion='1.1.3') | 50 | return conf.CHECK_BUNDLED_SYSTEM_PKG('nss_wrapper', minversion='1.1.5') |
2674 | 51 | Build.BuildContext.CHECK_NSS_WRAPPER = CHECK_NSS_WRAPPER | 51 | Build.BuildContext.CHECK_NSS_WRAPPER = CHECK_NSS_WRAPPER |
2675 | 52 | 52 | ||
2676 | 53 | @conf | 53 | @conf |
2677 | @@ -62,5 +62,5 @@ Build.BuildContext.CHECK_UID_WRAPPER = CHECK_UID_WRAPPER | |||
2678 | 62 | 62 | ||
2679 | 63 | @conf | 63 | @conf |
2680 | 64 | def CHECK_PAM_WRAPPER(conf): | 64 | def CHECK_PAM_WRAPPER(conf): |
2682 | 65 | return conf.CHECK_BUNDLED_SYSTEM_PKG('pam_wrapper', minversion='1.0.4') | 65 | return conf.CHECK_BUNDLED_SYSTEM_PKG('pam_wrapper', minversion='1.0.7') |
2683 | 66 | Build.BuildContext.CHECK_PAM_WRAPPER = CHECK_PAM_WRAPPER | 66 | Build.BuildContext.CHECK_PAM_WRAPPER = CHECK_PAM_WRAPPER |
2684 | diff --git a/buildtools/wafsamba/samba_utils.py b/buildtools/wafsamba/samba_utils.py | |||
2685 | index 0f95c12..ad97de1 100644 | |||
2686 | --- a/buildtools/wafsamba/samba_utils.py | |||
2687 | +++ b/buildtools/wafsamba/samba_utils.py | |||
2688 | @@ -1,30 +1,92 @@ | |||
2689 | 1 | # a waf tool to add autoconf-like macros to the configure section | 1 | # a waf tool to add autoconf-like macros to the configure section |
2690 | 2 | # and for SAMBA_ macros for building libraries, binaries etc | 2 | # and for SAMBA_ macros for building libraries, binaries etc |
2691 | 3 | 3 | ||
2693 | 4 | import os, sys, re, fnmatch, shlex | 4 | import errno |
2694 | 5 | import os, sys, re, fnmatch, shlex, inspect | ||
2695 | 5 | from optparse import SUPPRESS_HELP | 6 | from optparse import SUPPRESS_HELP |
2700 | 6 | import Build, Options, Utils, Task, Logs, Configure | 7 | from waflib import Build, Options, Utils, Task, Logs, Configure, Errors, Context |
2701 | 7 | from TaskGen import feature, before, after | 8 | from waflib import Scripting |
2702 | 8 | from Configure import conf, ConfigurationContext | 9 | from waflib.TaskGen import feature, before, after |
2703 | 9 | from Logs import debug | 10 | from waflib.Configure import ConfigurationContext |
2704 | 11 | from waflib.Logs import debug | ||
2705 | 12 | from waflib import ConfigSet | ||
2706 | 13 | from waflib.Build import CACHE_SUFFIX | ||
2707 | 10 | 14 | ||
2708 | 11 | # TODO: make this a --option | 15 | # TODO: make this a --option |
2709 | 12 | LIB_PATH="shared" | 16 | LIB_PATH="shared" |
2710 | 13 | 17 | ||
2711 | 14 | 18 | ||
2712 | 19 | PY3 = sys.version_info[0] == 3 | ||
2713 | 20 | |||
2714 | 21 | if PY3: | ||
2715 | 22 | |||
2716 | 23 | # helper function to get a string from a variable that maybe 'str' or | ||
2717 | 24 | # 'bytes' if 'bytes' then it is decoded using 'utf8'. If 'str' is passed | ||
2718 | 25 | # it is returned unchanged | ||
2719 | 26 | # Using this function is PY2/PY3 code should ensure in most cases | ||
2720 | 27 | # the PY2 code runs unchanged in PY2 whereas the code in PY3 possibly | ||
2721 | 28 | # decodes the variable (see PY2 implementation of this function below) | ||
2722 | 29 | def get_string(bytesorstring): | ||
2723 | 30 | tmp = bytesorstring | ||
2724 | 31 | if isinstance(bytesorstring, bytes): | ||
2725 | 32 | tmp = bytesorstring.decode('utf8') | ||
2726 | 33 | elif not isinstance(bytesorstring, str): | ||
2727 | 34 | raise ValueError('Expected byte of string for %s:%s' % (type(bytesorstring), bytesorstring)) | ||
2728 | 35 | return tmp | ||
2729 | 36 | |||
2730 | 37 | else: | ||
2731 | 38 | |||
2732 | 39 | # Helper function to return string. | ||
2733 | 40 | # if 'str' or 'unicode' passed in they are returned unchanged | ||
2734 | 41 | # otherwise an exception is generated | ||
2735 | 42 | # Using this function is PY2/PY3 code should ensure in most cases | ||
2736 | 43 | # the PY2 code runs unchanged in PY2 whereas the code in PY3 possibly | ||
2737 | 44 | # decodes the variable (see PY3 implementation of this function above) | ||
2738 | 45 | def get_string(bytesorstring): | ||
2739 | 46 | tmp = bytesorstring | ||
2740 | 47 | if not(isinstance(bytesorstring, str) or isinstance(bytesorstring, unicode)): | ||
2741 | 48 | raise ValueError('Expected str or unicode for %s:%s' % (type(bytesorstring), bytesorstring)) | ||
2742 | 49 | return tmp | ||
2743 | 50 | |||
2744 | 15 | # sigh, python octal constants are a mess | 51 | # sigh, python octal constants are a mess |
2745 | 16 | MODE_644 = int('644', 8) | 52 | MODE_644 = int('644', 8) |
2746 | 53 | MODE_744 = int('744', 8) | ||
2747 | 17 | MODE_755 = int('755', 8) | 54 | MODE_755 = int('755', 8) |
2748 | 55 | MODE_777 = int('777', 8) | ||
2749 | 56 | |||
2750 | 57 | def conf(f): | ||
2751 | 58 | # override in order to propagate the argument "mandatory" | ||
2752 | 59 | def fun(*k, **kw): | ||
2753 | 60 | mandatory = True | ||
2754 | 61 | if 'mandatory' in kw: | ||
2755 | 62 | mandatory = kw['mandatory'] | ||
2756 | 63 | del kw['mandatory'] | ||
2757 | 64 | |||
2758 | 65 | try: | ||
2759 | 66 | return f(*k, **kw) | ||
2760 | 67 | except Errors.ConfigurationError: | ||
2761 | 68 | if mandatory: | ||
2762 | 69 | raise | ||
2763 | 70 | |||
2764 | 71 | fun.__name__ = f.__name__ | ||
2765 | 72 | if 'mandatory' in inspect.getsource(f): | ||
2766 | 73 | fun = f | ||
2767 | 74 | |||
2768 | 75 | setattr(Configure.ConfigurationContext, f.__name__, fun) | ||
2769 | 76 | setattr(Build.BuildContext, f.__name__, fun) | ||
2770 | 77 | return f | ||
2771 | 78 | Configure.conf = conf | ||
2772 | 79 | Configure.conftest = conf | ||
2773 | 18 | 80 | ||
2774 | 19 | @conf | 81 | @conf |
2775 | 20 | def SET_TARGET_TYPE(ctx, target, value): | 82 | def SET_TARGET_TYPE(ctx, target, value): |
2776 | 21 | '''set the target type of a target''' | 83 | '''set the target type of a target''' |
2777 | 22 | cache = LOCAL_CACHE(ctx, 'TARGET_TYPE') | 84 | cache = LOCAL_CACHE(ctx, 'TARGET_TYPE') |
2778 | 23 | if target in cache and cache[target] != 'EMPTY': | 85 | if target in cache and cache[target] != 'EMPTY': |
2780 | 24 | Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, cache[target])) | 86 | Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.path.abspath(), value, cache[target])) |
2781 | 25 | sys.exit(1) | 87 | sys.exit(1) |
2782 | 26 | LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value) | 88 | LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value) |
2784 | 27 | debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir)) | 89 | debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.path.abspath())) |
2785 | 28 | return True | 90 | return True |
2786 | 29 | 91 | ||
2787 | 30 | 92 | ||
2788 | @@ -101,7 +163,7 @@ def LOCAL_CACHE_SET(ctx, cachename, key, value): | |||
2789 | 101 | def ASSERT(ctx, expression, msg): | 163 | def ASSERT(ctx, expression, msg): |
2790 | 102 | '''a build assert call''' | 164 | '''a build assert call''' |
2791 | 103 | if not expression: | 165 | if not expression: |
2793 | 104 | raise Utils.WafError("ERROR: %s\n" % msg) | 166 | raise Errors.WafError("ERROR: %s\n" % msg) |
2794 | 105 | Build.BuildContext.ASSERT = ASSERT | 167 | Build.BuildContext.ASSERT = ASSERT |
2795 | 106 | 168 | ||
2796 | 107 | 169 | ||
2797 | @@ -122,9 +184,9 @@ def dict_concat(d1, d2): | |||
2798 | 122 | 184 | ||
2799 | 123 | def ADD_COMMAND(opt, name, function): | 185 | def ADD_COMMAND(opt, name, function): |
2800 | 124 | '''add a new top level command to waf''' | 186 | '''add a new top level command to waf''' |
2802 | 125 | Utils.g_module.__dict__[name] = function | 187 | Context.g_module.__dict__[name] = function |
2803 | 126 | opt.name = function | 188 | opt.name = function |
2805 | 127 | Options.Handler.ADD_COMMAND = ADD_COMMAND | 189 | Options.OptionsContext.ADD_COMMAND = ADD_COMMAND |
2806 | 128 | 190 | ||
2807 | 129 | 191 | ||
2808 | 130 | @feature('c', 'cc', 'cshlib', 'cprogram') | 192 | @feature('c', 'cc', 'cshlib', 'cprogram') |
2809 | @@ -199,8 +261,10 @@ def subst_vars_error(string, env): | |||
2810 | 199 | if re.match('\$\{\w+\}', v): | 261 | if re.match('\$\{\w+\}', v): |
2811 | 200 | vname = v[2:-1] | 262 | vname = v[2:-1] |
2812 | 201 | if not vname in env: | 263 | if not vname in env: |
2814 | 202 | raise KeyError("Failed to find variable %s in %s" % (vname, string)) | 264 | raise KeyError("Failed to find variable %s in %s in env %s <%s>" % (vname, string, env.__class__, str(env))) |
2815 | 203 | v = env[vname] | 265 | v = env[vname] |
2816 | 266 | if isinstance(v, list): | ||
2817 | 267 | v = ' '.join(v) | ||
2818 | 204 | out.append(v) | 268 | out.append(v) |
2819 | 205 | return ''.join(out) | 269 | return ''.join(out) |
2820 | 206 | 270 | ||
2821 | @@ -212,51 +276,6 @@ def SUBST_ENV_VAR(ctx, varname): | |||
2822 | 212 | Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR | 276 | Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR |
2823 | 213 | 277 | ||
2824 | 214 | 278 | ||
2825 | 215 | def ENFORCE_GROUP_ORDERING(bld): | ||
2826 | 216 | '''enforce group ordering for the project. This | ||
2827 | 217 | makes the group ordering apply only when you specify | ||
2828 | 218 | a target with --target''' | ||
2829 | 219 | if Options.options.compile_targets: | ||
2830 | 220 | @feature('*') | ||
2831 | 221 | @before('exec_rule', 'apply_core', 'collect') | ||
2832 | 222 | def force_previous_groups(self): | ||
2833 | 223 | if getattr(self.bld, 'enforced_group_ordering', False): | ||
2834 | 224 | return | ||
2835 | 225 | self.bld.enforced_group_ordering = True | ||
2836 | 226 | |||
2837 | 227 | def group_name(g): | ||
2838 | 228 | tm = self.bld.task_manager | ||
2839 | 229 | return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0] | ||
2840 | 230 | |||
2841 | 231 | my_id = id(self) | ||
2842 | 232 | bld = self.bld | ||
2843 | 233 | stop = None | ||
2844 | 234 | for g in bld.task_manager.groups: | ||
2845 | 235 | for t in g.tasks_gen: | ||
2846 | 236 | if id(t) == my_id: | ||
2847 | 237 | stop = id(g) | ||
2848 | 238 | debug('group: Forcing up to group %s for target %s', | ||
2849 | 239 | group_name(g), self.name or self.target) | ||
2850 | 240 | break | ||
2851 | 241 | if stop is not None: | ||
2852 | 242 | break | ||
2853 | 243 | if stop is None: | ||
2854 | 244 | return | ||
2855 | 245 | |||
2856 | 246 | for i in xrange(len(bld.task_manager.groups)): | ||
2857 | 247 | g = bld.task_manager.groups[i] | ||
2858 | 248 | bld.task_manager.current_group = i | ||
2859 | 249 | if id(g) == stop: | ||
2860 | 250 | break | ||
2861 | 251 | debug('group: Forcing group %s', group_name(g)) | ||
2862 | 252 | for t in g.tasks_gen: | ||
2863 | 253 | if not getattr(t, 'forced_groups', False): | ||
2864 | 254 | debug('group: Posting %s', t.name or t.target) | ||
2865 | 255 | t.forced_groups = True | ||
2866 | 256 | t.post() | ||
2867 | 257 | Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING | ||
2868 | 258 | |||
2869 | 259 | |||
2870 | 260 | def recursive_dirlist(dir, relbase, pattern=None): | 279 | def recursive_dirlist(dir, relbase, pattern=None): |
2871 | 261 | '''recursive directory list''' | 280 | '''recursive directory list''' |
2872 | 262 | ret = [] | 281 | ret = [] |
2873 | @@ -271,6 +290,18 @@ def recursive_dirlist(dir, relbase, pattern=None): | |||
2874 | 271 | return ret | 290 | return ret |
2875 | 272 | 291 | ||
2876 | 273 | 292 | ||
2877 | 293 | def symlink(src, dst, force=True): | ||
2878 | 294 | """Can create symlink by force""" | ||
2879 | 295 | try: | ||
2880 | 296 | os.symlink(src, dst) | ||
2881 | 297 | except OSError as exc: | ||
2882 | 298 | if exc.errno == errno.EEXIST and force: | ||
2883 | 299 | os.remove(dst) | ||
2884 | 300 | os.symlink(src, dst) | ||
2885 | 301 | else: | ||
2886 | 302 | raise | ||
2887 | 303 | |||
2888 | 304 | |||
2889 | 274 | def mkdir_p(dir): | 305 | def mkdir_p(dir): |
2890 | 275 | '''like mkdir -p''' | 306 | '''like mkdir -p''' |
2891 | 276 | if not dir: | 307 | if not dir: |
2892 | @@ -312,8 +343,7 @@ def EXPAND_VARIABLES(ctx, varstr, vars=None): | |||
2893 | 312 | if not isinstance(varstr, str): | 343 | if not isinstance(varstr, str): |
2894 | 313 | return varstr | 344 | return varstr |
2895 | 314 | 345 | ||
2898 | 315 | import Environment | 346 | env = ConfigSet.ConfigSet() |
2897 | 316 | env = Environment.Environment() | ||
2899 | 317 | ret = varstr | 347 | ret = varstr |
2900 | 318 | # substitute on user supplied dict if avaiilable | 348 | # substitute on user supplied dict if avaiilable |
2901 | 319 | if vars is not None: | 349 | if vars is not None: |
2902 | @@ -345,16 +375,18 @@ def RUN_COMMAND(cmd, | |||
2903 | 345 | return os.WEXITSTATUS(status) | 375 | return os.WEXITSTATUS(status) |
2904 | 346 | if os.WIFSIGNALED(status): | 376 | if os.WIFSIGNALED(status): |
2905 | 347 | return - os.WTERMSIG(status) | 377 | return - os.WTERMSIG(status) |
2907 | 348 | Logs.error("Unknown exit reason %d for command: %s" (status, cmd)) | 378 | Logs.error("Unknown exit reason %d for command: %s" % (status, cmd)) |
2908 | 349 | return -1 | 379 | return -1 |
2909 | 350 | 380 | ||
2910 | 351 | 381 | ||
2911 | 352 | def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None): | 382 | def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None): |
2912 | 353 | env = LOAD_ENVIRONMENT() | 383 | env = LOAD_ENVIRONMENT() |
2913 | 354 | if pythonpath is None: | 384 | if pythonpath is None: |
2915 | 355 | pythonpath = os.path.join(Utils.g_module.blddir, 'python') | 385 | pythonpath = os.path.join(Context.g_module.out, 'python') |
2916 | 356 | result = 0 | 386 | result = 0 |
2917 | 357 | for interp in env.python_interpreters: | 387 | for interp in env.python_interpreters: |
2918 | 388 | if not isinstance(interp, str): | ||
2919 | 389 | interp = ' '.join(interp) | ||
2920 | 358 | for testfile in testfiles: | 390 | for testfile in testfiles: |
2921 | 359 | cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile) | 391 | cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile) |
2922 | 360 | if extra_env: | 392 | if extra_env: |
2923 | @@ -374,16 +406,15 @@ try: | |||
2924 | 374 | # Even if hashlib.md5 exists, it may be unusable. | 406 | # Even if hashlib.md5 exists, it may be unusable. |
2925 | 375 | # Try to use MD5 function. In FIPS mode this will cause an exception | 407 | # Try to use MD5 function. In FIPS mode this will cause an exception |
2926 | 376 | # and we'll get to the replacement code | 408 | # and we'll get to the replacement code |
2928 | 377 | foo = md5('abcd') | 409 | foo = md5(b'abcd') |
2929 | 378 | except: | 410 | except: |
2930 | 379 | try: | 411 | try: |
2931 | 380 | import md5 | 412 | import md5 |
2932 | 381 | # repeat the same check here, mere success of import is not enough. | 413 | # repeat the same check here, mere success of import is not enough. |
2933 | 382 | # Try to use MD5 function. In FIPS mode this will cause an exception | 414 | # Try to use MD5 function. In FIPS mode this will cause an exception |
2935 | 383 | foo = md5.md5('abcd') | 415 | foo = md5.md5(b'abcd') |
2936 | 384 | except: | 416 | except: |
2939 | 385 | import Constants | 417 | Context.SIG_NIL = hash('abcd') |
2938 | 386 | Constants.SIG_NIL = hash('abcd') | ||
2940 | 387 | class replace_md5(object): | 418 | class replace_md5(object): |
2941 | 388 | def __init__(self): | 419 | def __init__(self): |
2942 | 389 | self.val = None | 420 | self.val = None |
2943 | @@ -409,20 +440,20 @@ except: | |||
2944 | 409 | def LOAD_ENVIRONMENT(): | 440 | def LOAD_ENVIRONMENT(): |
2945 | 410 | '''load the configuration environment, allowing access to env vars | 441 | '''load the configuration environment, allowing access to env vars |
2946 | 411 | from new commands''' | 442 | from new commands''' |
2949 | 412 | import Environment | 443 | env = ConfigSet.ConfigSet() |
2948 | 413 | env = Environment.Environment() | ||
2950 | 414 | try: | 444 | try: |
2954 | 415 | env.load('.lock-wscript') | 445 | p = os.path.join(Context.g_module.out, 'c4che/default'+CACHE_SUFFIX) |
2955 | 416 | env.load(env.blddir + '/c4che/default.cache.py') | 446 | env.load(p) |
2956 | 417 | except: | 447 | except (OSError, IOError): |
2957 | 418 | pass | 448 | pass |
2958 | 419 | return env | 449 | return env |
2959 | 420 | 450 | ||
2960 | 421 | 451 | ||
2961 | 422 | def IS_NEWER(bld, file1, file2): | 452 | def IS_NEWER(bld, file1, file2): |
2962 | 423 | '''return True if file1 is newer than file2''' | 453 | '''return True if file1 is newer than file2''' |
2965 | 424 | t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime | 454 | curdir = bld.path.abspath() |
2966 | 425 | t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime | 455 | t1 = os.stat(os.path.join(curdir, file1)).st_mtime |
2967 | 456 | t2 = os.stat(os.path.join(curdir, file2)).st_mtime | ||
2968 | 426 | return t1 > t2 | 457 | return t1 > t2 |
2969 | 427 | Build.BuildContext.IS_NEWER = IS_NEWER | 458 | Build.BuildContext.IS_NEWER = IS_NEWER |
2970 | 428 | 459 | ||
2971 | @@ -432,47 +463,46 @@ def RECURSE(ctx, directory): | |||
2972 | 432 | '''recurse into a directory, relative to the curdir or top level''' | 463 | '''recurse into a directory, relative to the curdir or top level''' |
2973 | 433 | try: | 464 | try: |
2974 | 434 | visited_dirs = ctx.visited_dirs | 465 | visited_dirs = ctx.visited_dirs |
2976 | 435 | except: | 466 | except AttributeError: |
2977 | 436 | visited_dirs = ctx.visited_dirs = set() | 467 | visited_dirs = ctx.visited_dirs = set() |
2979 | 437 | d = os.path.join(ctx.curdir, directory) | 468 | d = os.path.join(ctx.path.abspath(), directory) |
2980 | 438 | if os.path.exists(d): | 469 | if os.path.exists(d): |
2981 | 439 | abspath = os.path.abspath(d) | 470 | abspath = os.path.abspath(d) |
2982 | 440 | else: | 471 | else: |
2984 | 441 | abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory)) | 472 | abspath = os.path.abspath(os.path.join(Context.g_module.top, directory)) |
2985 | 442 | ctxclass = ctx.__class__.__name__ | 473 | ctxclass = ctx.__class__.__name__ |
2986 | 443 | key = ctxclass + ':' + abspath | 474 | key = ctxclass + ':' + abspath |
2987 | 444 | if key in visited_dirs: | 475 | if key in visited_dirs: |
2988 | 445 | # already done it | 476 | # already done it |
2989 | 446 | return | 477 | return |
2990 | 447 | visited_dirs.add(key) | 478 | visited_dirs.add(key) |
2999 | 448 | relpath = os_path_relpath(abspath, ctx.curdir) | 479 | relpath = os_path_relpath(abspath, ctx.path.abspath()) |
3000 | 449 | if ctxclass == 'Handler': | 480 | if ctxclass in ['tmp', 'OptionsContext', 'ConfigurationContext', 'BuildContext']: |
3001 | 450 | return ctx.sub_options(relpath) | 481 | return ctx.recurse(relpath) |
3002 | 451 | if ctxclass == 'ConfigurationContext': | 482 | if 'waflib.extras.compat15' in sys.modules: |
3003 | 452 | return ctx.sub_config(relpath) | 483 | return ctx.recurse(relpath) |
3004 | 453 | if ctxclass == 'BuildContext': | 484 | Logs.error('Unknown RECURSE context class: {}'.format(ctxclass)) |
2997 | 454 | return ctx.add_subdirs(relpath) | ||
2998 | 455 | Logs.error('Unknown RECURSE context class', ctxclass) | ||
3005 | 456 | raise | 485 | raise |
3007 | 457 | Options.Handler.RECURSE = RECURSE | 486 | Options.OptionsContext.RECURSE = RECURSE |
3008 | 458 | Build.BuildContext.RECURSE = RECURSE | 487 | Build.BuildContext.RECURSE = RECURSE |
3009 | 459 | 488 | ||
3010 | 460 | 489 | ||
3012 | 461 | def CHECK_MAKEFLAGS(bld): | 490 | def CHECK_MAKEFLAGS(options): |
3013 | 462 | '''check for MAKEFLAGS environment variable in case we are being | 491 | '''check for MAKEFLAGS environment variable in case we are being |
3014 | 463 | called from a Makefile try to honor a few make command line flags''' | 492 | called from a Makefile try to honor a few make command line flags''' |
3015 | 464 | if not 'WAF_MAKE' in os.environ: | 493 | if not 'WAF_MAKE' in os.environ: |
3016 | 465 | return | 494 | return |
3017 | 466 | makeflags = os.environ.get('MAKEFLAGS') | 495 | makeflags = os.environ.get('MAKEFLAGS') |
3018 | 467 | if makeflags is None: | 496 | if makeflags is None: |
3020 | 468 | return | 497 | makeflags = "" |
3021 | 469 | jobs_set = False | 498 | jobs_set = False |
3022 | 499 | jobs = None | ||
3023 | 470 | # we need to use shlex.split to cope with the escaping of spaces | 500 | # we need to use shlex.split to cope with the escaping of spaces |
3024 | 471 | # in makeflags | 501 | # in makeflags |
3025 | 472 | for opt in shlex.split(makeflags): | 502 | for opt in shlex.split(makeflags): |
3026 | 473 | # options can come either as -x or as x | 503 | # options can come either as -x or as x |
3027 | 474 | if opt[0:2] == 'V=': | 504 | if opt[0:2] == 'V=': |
3029 | 475 | Options.options.verbose = Logs.verbose = int(opt[2:]) | 505 | options.verbose = Logs.verbose = int(opt[2:]) |
3030 | 476 | if Logs.verbose > 0: | 506 | if Logs.verbose > 0: |
3031 | 477 | Logs.zones = ['runner'] | 507 | Logs.zones = ['runner'] |
3032 | 478 | if Logs.verbose > 2: | 508 | if Logs.verbose > 2: |
3033 | @@ -486,22 +516,53 @@ def CHECK_MAKEFLAGS(bld): | |||
3034 | 486 | # this is also how "make test TESTS=testpattern" works, and | 516 | # this is also how "make test TESTS=testpattern" works, and |
3035 | 487 | # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1" | 517 | # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1" |
3036 | 488 | loc = opt.find('=') | 518 | loc = opt.find('=') |
3038 | 489 | setattr(Options.options, opt[0:loc], opt[loc+1:]) | 519 | setattr(options, opt[0:loc], opt[loc+1:]) |
3039 | 490 | elif opt[0] != '-': | 520 | elif opt[0] != '-': |
3040 | 491 | for v in opt: | 521 | for v in opt: |
3042 | 492 | if v == 'j': | 522 | if re.search(r'j[0-9]*$', v): |
3043 | 493 | jobs_set = True | 523 | jobs_set = True |
3044 | 524 | jobs = opt.strip('j') | ||
3045 | 494 | elif v == 'k': | 525 | elif v == 'k': |
3048 | 495 | Options.options.keep = True | 526 | options.keep = True |
3049 | 496 | elif opt == '-j': | 527 | elif re.search(r'-j[0-9]*$', opt): |
3050 | 497 | jobs_set = True | 528 | jobs_set = True |
3051 | 529 | jobs = opt.strip('-j') | ||
3052 | 498 | elif opt == '-k': | 530 | elif opt == '-k': |
3054 | 499 | Options.options.keep = True | 531 | options.keep = True |
3055 | 500 | if not jobs_set: | 532 | if not jobs_set: |
3056 | 501 | # default to one job | 533 | # default to one job |
3060 | 502 | Options.options.jobs = 1 | 534 | options.jobs = 1 |
3061 | 503 | 535 | elif jobs_set and jobs: | |
3062 | 504 | Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS | 536 | options.jobs = int(jobs) |
3063 | 537 | |||
3064 | 538 | waflib_options_parse_cmd_args = Options.OptionsContext.parse_cmd_args | ||
3065 | 539 | def wafsamba_options_parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False): | ||
3066 | 540 | (options, commands, envvars) = \ | ||
3067 | 541 | waflib_options_parse_cmd_args(self, | ||
3068 | 542 | _args=_args, | ||
3069 | 543 | cwd=cwd, | ||
3070 | 544 | allow_unknown=allow_unknown) | ||
3071 | 545 | CHECK_MAKEFLAGS(options) | ||
3072 | 546 | if options.jobs == 1: | ||
3073 | 547 | # | ||
3074 | 548 | # waflib.Runner.Parallel processes jobs inline if the possible number | ||
3075 | 549 | # of jobs is just 1. But (at least in waf <= 2.0.12) it still calls | ||
3076 | 550 | # create a waflib.Runner.Spawner() which creates a single | ||
3077 | 551 | # waflib.Runner.Consumer() thread that tries to process jobs from the | ||
3078 | 552 | # queue. | ||
3079 | 553 | # | ||
3080 | 554 | # This has strange effects, which are not noticed typically, | ||
3081 | 555 | # but at least on AIX python has broken threading and fails | ||
3082 | 556 | # in random ways. | ||
3083 | 557 | # | ||
3084 | 558 | # So we just add a dummy Spawner class. | ||
3085 | 559 | class NoOpSpawner(object): | ||
3086 | 560 | def __init__(self, master): | ||
3087 | 561 | return | ||
3088 | 562 | from waflib import Runner | ||
3089 | 563 | Runner.Spawner = NoOpSpawner | ||
3090 | 564 | return options, commands, envvars | ||
3091 | 565 | Options.OptionsContext.parse_cmd_args = wafsamba_options_parse_cmd_args | ||
3092 | 505 | 566 | ||
3093 | 506 | option_groups = {} | 567 | option_groups = {} |
3094 | 507 | 568 | ||
3095 | @@ -513,7 +574,7 @@ def option_group(opt, name): | |||
3096 | 513 | gr = opt.add_option_group(name) | 574 | gr = opt.add_option_group(name) |
3097 | 514 | option_groups[name] = gr | 575 | option_groups[name] = gr |
3098 | 515 | return gr | 576 | return gr |
3100 | 516 | Options.Handler.option_group = option_group | 577 | Options.OptionsContext.option_group = option_group |
3101 | 517 | 578 | ||
3102 | 518 | 579 | ||
3103 | 519 | def save_file(filename, contents, create_dir=False): | 580 | def save_file(filename, contents, create_dir=False): |
3104 | @@ -542,9 +603,9 @@ def load_file(filename): | |||
3105 | 542 | 603 | ||
3106 | 543 | def reconfigure(ctx): | 604 | def reconfigure(ctx): |
3107 | 544 | '''rerun configure if necessary''' | 605 | '''rerun configure if necessary''' |
3111 | 545 | import Configure, samba_wildcard, Scripting | 606 | if not os.path.exists(os.environ.get('WAFLOCK', '.lock-wscript')): |
3112 | 546 | if not os.path.exists(".lock-wscript"): | 607 | raise Errors.WafError('configure has not been run') |
3113 | 547 | raise Utils.WafError('configure has not been run') | 608 | import samba_wildcard |
3114 | 548 | bld = samba_wildcard.fake_build_environment() | 609 | bld = samba_wildcard.fake_build_environment() |
3115 | 549 | Configure.autoconfig = True | 610 | Configure.autoconfig = True |
3116 | 550 | Scripting.check_configured(bld) | 611 | Scripting.check_configured(bld) |
3117 | @@ -561,7 +622,7 @@ def map_shlib_extension(ctx, name, python=False): | |||
3118 | 561 | if python: | 622 | if python: |
3119 | 562 | return ctx.env.pyext_PATTERN % root1 | 623 | return ctx.env.pyext_PATTERN % root1 |
3120 | 563 | else: | 624 | else: |
3122 | 564 | (root2, ext2) = os.path.splitext(ctx.env.shlib_PATTERN) | 625 | (root2, ext2) = os.path.splitext(ctx.env.cshlib_PATTERN) |
3123 | 565 | return root1+ext2 | 626 | return root1+ext2 |
3124 | 566 | Build.BuildContext.map_shlib_extension = map_shlib_extension | 627 | Build.BuildContext.map_shlib_extension = map_shlib_extension |
3125 | 567 | 628 | ||
3126 | @@ -583,7 +644,7 @@ def make_libname(ctx, name, nolibprefix=False, version=None, python=False): | |||
3127 | 583 | if python: | 644 | if python: |
3128 | 584 | libname = apply_pattern(name, ctx.env.pyext_PATTERN) | 645 | libname = apply_pattern(name, ctx.env.pyext_PATTERN) |
3129 | 585 | else: | 646 | else: |
3131 | 586 | libname = apply_pattern(name, ctx.env.shlib_PATTERN) | 647 | libname = apply_pattern(name, ctx.env.cshlib_PATTERN) |
3132 | 587 | if nolibprefix and libname[0:3] == 'lib': | 648 | if nolibprefix and libname[0:3] == 'lib': |
3133 | 588 | libname = libname[3:] | 649 | libname = libname[3:] |
3134 | 589 | if version: | 650 | if version: |
3135 | @@ -617,7 +678,7 @@ def get_tgt_list(bld): | |||
3136 | 617 | tgt_list.append(t) | 678 | tgt_list.append(t) |
3137 | 618 | return tgt_list | 679 | return tgt_list |
3138 | 619 | 680 | ||
3140 | 620 | from Constants import WSCRIPT_FILE | 681 | from waflib.Context import WSCRIPT_FILE |
3141 | 621 | def PROCESS_SEPARATE_RULE(self, rule): | 682 | def PROCESS_SEPARATE_RULE(self, rule): |
3142 | 622 | ''' cause waf to process additional script based on `rule'. | 683 | ''' cause waf to process additional script based on `rule'. |
3143 | 623 | You should have file named wscript_<stage>_rule in the current directory | 684 | You should have file named wscript_<stage>_rule in the current directory |
3144 | @@ -628,15 +689,21 @@ def PROCESS_SEPARATE_RULE(self, rule): | |||
3145 | 628 | stage = 'configure' | 689 | stage = 'configure' |
3146 | 629 | elif isinstance(self, Build.BuildContext): | 690 | elif isinstance(self, Build.BuildContext): |
3147 | 630 | stage = 'build' | 691 | stage = 'build' |
3157 | 631 | file_path = os.path.join(self.curdir, WSCRIPT_FILE+'_'+stage+'_'+rule) | 692 | file_path = os.path.join(self.path.abspath(), WSCRIPT_FILE+'_'+stage+'_'+rule) |
3158 | 632 | txt = load_file(file_path) | 693 | node = self.root.find_node(file_path) |
3159 | 633 | if txt: | 694 | if node: |
3160 | 634 | dc = {'ctx': self} | 695 | try: |
3161 | 635 | if getattr(self.__class__, 'pre_recurse', None): | 696 | cache = self.recurse_cache |
3162 | 636 | dc = self.pre_recurse(txt, file_path, self.curdir) | 697 | except AttributeError: |
3163 | 637 | exec(compile(txt, file_path, 'exec'), dc) | 698 | cache = self.recurse_cache = {} |
3164 | 638 | if getattr(self.__class__, 'post_recurse', None): | 699 | if node not in cache: |
3165 | 639 | dc = self.post_recurse(txt, file_path, self.curdir) | 700 | cache[node] = True |
3166 | 701 | self.pre_recurse(node) | ||
3167 | 702 | try: | ||
3168 | 703 | function_code = node.read('rU', None) | ||
3169 | 704 | exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict) | ||
3170 | 705 | finally: | ||
3171 | 706 | self.post_recurse(node) | ||
3172 | 640 | 707 | ||
3173 | 641 | Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE | 708 | Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE |
3174 | 642 | ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE | 709 | ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE |
3175 | @@ -693,4 +760,4 @@ def samba_add_onoff_option(opt, option, help=(), dest=None, default=True, | |||
3176 | 693 | default=default) | 760 | default=default) |
3177 | 694 | opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false", | 761 | opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false", |
3178 | 695 | dest=dest) | 762 | dest=dest) |
3180 | 696 | Options.Handler.samba_add_onoff_option = samba_add_onoff_option | 763 | Options.OptionsContext.samba_add_onoff_option = samba_add_onoff_option |
3181 | diff --git a/buildtools/wafsamba/samba_version.py b/buildtools/wafsamba/samba_version.py | |||
3182 | index be26439..f0e7b4d 100644 | |||
3183 | --- a/buildtools/wafsamba/samba_version.py | |||
3184 | +++ b/buildtools/wafsamba/samba_version.py | |||
3185 | @@ -1,5 +1,5 @@ | |||
3188 | 1 | import os | 1 | import os, sys |
3189 | 2 | import Utils | 2 | from waflib import Utils, Context |
3190 | 3 | import samba_utils | 3 | import samba_utils |
3191 | 4 | from samba_git import find_git | 4 | from samba_git import find_git |
3192 | 5 | 5 | ||
3193 | @@ -14,7 +14,7 @@ def git_version_summary(path, env=None): | |||
3194 | 14 | environ = dict(os.environ) | 14 | environ = dict(os.environ) |
3195 | 15 | environ["GIT_DIR"] = '%s/.git' % path | 15 | environ["GIT_DIR"] = '%s/.git' % path |
3196 | 16 | environ["GIT_WORK_TREE"] = path | 16 | environ["GIT_WORK_TREE"] = path |
3198 | 17 | git = Utils.cmd_output(env.GIT + ' show --pretty=format:"%h%n%ct%n%H%n%cd" --stat HEAD', silent=True, env=environ) | 17 | git = samba_utils.get_string(Utils.cmd_output(env.GIT + ' show --pretty=format:"%h%n%ct%n%H%n%cd" --stat HEAD', silent=True, env=environ)) |
3199 | 18 | 18 | ||
3200 | 19 | lines = git.splitlines() | 19 | lines = git.splitlines() |
3201 | 20 | if not lines or len(lines) < 4: | 20 | if not lines or len(lines) < 4: |
3202 | @@ -198,7 +198,10 @@ also accepted as dictionary entries here | |||
3203 | 198 | for name in sorted(self.vcs_fields.keys()): | 198 | for name in sorted(self.vcs_fields.keys()): |
3204 | 199 | string+="#define SAMBA_VERSION_%s " % name | 199 | string+="#define SAMBA_VERSION_%s " % name |
3205 | 200 | value = self.vcs_fields[name] | 200 | value = self.vcs_fields[name] |
3207 | 201 | if isinstance(value, basestring): | 201 | string_types = str |
3208 | 202 | if sys.version_info[0] < 3: | ||
3209 | 203 | string_types = basestring | ||
3210 | 204 | if isinstance(value, string_types): | ||
3211 | 202 | string += "\"%s\"" % value | 205 | string += "\"%s\"" % value |
3212 | 203 | elif type(value) is int: | 206 | elif type(value) is int: |
3213 | 204 | string += "%d" % value | 207 | string += "%d" % value |
3214 | @@ -260,5 +263,5 @@ def load_version(env=None, is_install=True): | |||
3215 | 260 | env = samba_utils.LOAD_ENVIRONMENT() | 263 | env = samba_utils.LOAD_ENVIRONMENT() |
3216 | 261 | 264 | ||
3217 | 262 | version = samba_version_file("./VERSION", ".", env, is_install=is_install) | 265 | version = samba_version_file("./VERSION", ".", env, is_install=is_install) |
3219 | 263 | Utils.g_module.VERSION = version.STRING | 266 | Context.g_module.VERSION = version.STRING |
3220 | 264 | return version | 267 | return version |
3221 | diff --git a/buildtools/wafsamba/samba_waf18.py b/buildtools/wafsamba/samba_waf18.py | |||
3222 | 265 | new file mode 100644 | 268 | new file mode 100644 |
3223 | index 0000000..cc310fb | |||
3224 | --- /dev/null | |||
3225 | +++ b/buildtools/wafsamba/samba_waf18.py | |||
3226 | @@ -0,0 +1,429 @@ | |||
3227 | 1 | # compatibility layer for building with more recent waf versions | ||
3228 | 2 | |||
3229 | 3 | import os, shlex, sys | ||
3230 | 4 | from waflib import Build, Configure, Node, Utils, Options, Logs, TaskGen | ||
3231 | 5 | from waflib import ConfigSet | ||
3232 | 6 | from waflib.TaskGen import feature, after | ||
3233 | 7 | from waflib.Configure import conf, ConfigurationContext | ||
3234 | 8 | |||
3235 | 9 | from waflib.Tools.flex import decide_ext | ||
3236 | 10 | |||
3237 | 11 | # This version of flexfun runs in tsk.get_cwd() as opposed to the | ||
3238 | 12 | # bld.variant_dir: since input paths adjusted against tsk.get_cwd(), we have to | ||
3239 | 13 | # use tsk.get_cwd() for the work directory as well. | ||
3240 | 14 | def flexfun(tsk): | ||
3241 | 15 | env = tsk.env | ||
3242 | 16 | bld = tsk.generator.bld | ||
3243 | 17 | def to_list(xx): | ||
3244 | 18 | if isinstance(xx, str): | ||
3245 | 19 | return [xx] | ||
3246 | 20 | return xx | ||
3247 | 21 | tsk.last_cmd = lst = [] | ||
3248 | 22 | lst.extend(to_list(env.FLEX)) | ||
3249 | 23 | lst.extend(to_list(env.FLEXFLAGS)) | ||
3250 | 24 | inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs] | ||
3251 | 25 | if env.FLEX_MSYS: | ||
3252 | 26 | inputs = [x.replace(os.sep, '/') for x in inputs] | ||
3253 | 27 | lst.extend(inputs) | ||
3254 | 28 | lst = [x for x in lst if x] | ||
3255 | 29 | txt = bld.cmd_and_log(lst, cwd=tsk.get_cwd(), env=env.env or None, quiet=0) | ||
3256 | 30 | tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207 | ||
3257 | 31 | |||
3258 | 32 | TaskGen.declare_chain( | ||
3259 | 33 | name = 'flex', | ||
3260 | 34 | rule = flexfun, # issue #854 | ||
3261 | 35 | ext_in = '.l', | ||
3262 | 36 | decider = decide_ext, | ||
3263 | 37 | ) | ||
3264 | 38 | |||
3265 | 39 | |||
3266 | 40 | for y in (Build.BuildContext, Build.CleanContext, Build.InstallContext, Build.UninstallContext, Build.ListContext): | ||
3267 | 41 | class tmp(y): | ||
3268 | 42 | variant = 'default' | ||
3269 | 43 | |||
3270 | 44 | def abspath(self, env=None): | ||
3271 | 45 | if env and hasattr(self, 'children'): | ||
3272 | 46 | return self.get_bld().abspath() | ||
3273 | 47 | return self.old_abspath() | ||
3274 | 48 | Node.Node.old_abspath = Node.Node.abspath | ||
3275 | 49 | Node.Node.abspath = abspath | ||
3276 | 50 | |||
3277 | 51 | def bldpath(self, env=None): | ||
3278 | 52 | return self.abspath() | ||
3279 | 53 | #return self.path_from(self.ctx.bldnode.parent) | ||
3280 | 54 | Node.Node.bldpath = bldpath | ||
3281 | 55 | |||
3282 | 56 | def srcpath(self, env=None): | ||
3283 | 57 | return self.abspath() | ||
3284 | 58 | #return self.path_from(self.ctx.bldnode.parent) | ||
3285 | 59 | Node.Node.srcpath = srcpath | ||
3286 | 60 | |||
3287 | 61 | def store_fast(self, filename): | ||
3288 | 62 | file = open(filename, 'wb') | ||
3289 | 63 | data = self.get_merged_dict() | ||
3290 | 64 | try: | ||
3291 | 65 | Build.cPickle.dump(data, file, -1) | ||
3292 | 66 | finally: | ||
3293 | 67 | file.close() | ||
3294 | 68 | ConfigSet.ConfigSet.store_fast = store_fast | ||
3295 | 69 | |||
3296 | 70 | def load_fast(self, filename): | ||
3297 | 71 | file = open(filename, 'rb') | ||
3298 | 72 | try: | ||
3299 | 73 | data = Build.cPickle.load(file) | ||
3300 | 74 | finally: | ||
3301 | 75 | file.close() | ||
3302 | 76 | self.table.update(data) | ||
3303 | 77 | ConfigSet.ConfigSet.load_fast = load_fast | ||
3304 | 78 | |||
3305 | 79 | @feature('c', 'cxx', 'd', 'asm', 'fc', 'includes') | ||
3306 | 80 | @after('propagate_uselib_vars', 'process_source') | ||
3307 | 81 | def apply_incpaths(self): | ||
3308 | 82 | lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES']) | ||
3309 | 83 | self.includes_nodes = lst | ||
3310 | 84 | cwdx = getattr(self.bld, 'cwdx', self.bld.bldnode) | ||
3311 | 85 | self.env['INCPATHS'] = [x.path_from(cwdx) for x in lst] | ||
3312 | 86 | |||
3313 | 87 | @conf | ||
3314 | 88 | def define(self, key, val, quote=True, comment=None): | ||
3315 | 89 | assert key and isinstance(key, str) | ||
3316 | 90 | |||
3317 | 91 | if val is None: | ||
3318 | 92 | val = () | ||
3319 | 93 | elif isinstance(val, bool): | ||
3320 | 94 | val = int(val) | ||
3321 | 95 | |||
3322 | 96 | # waf 1.5 | ||
3323 | 97 | self.env[key] = val | ||
3324 | 98 | |||
3325 | 99 | if isinstance(val, int) or isinstance(val, float): | ||
3326 | 100 | s = '%s=%s' | ||
3327 | 101 | else: | ||
3328 | 102 | s = quote and '%s="%s"' or '%s=%s' | ||
3329 | 103 | app = s % (key, str(val)) | ||
3330 | 104 | |||
3331 | 105 | ban = key + '=' | ||
3332 | 106 | lst = self.env.DEFINES | ||
3333 | 107 | for x in lst: | ||
3334 | 108 | if x.startswith(ban): | ||
3335 | 109 | lst[lst.index(x)] = app | ||
3336 | 110 | break | ||
3337 | 111 | else: | ||
3338 | 112 | self.env.append_value('DEFINES', app) | ||
3339 | 113 | |||
3340 | 114 | self.env.append_unique('define_key', key) | ||
3341 | 115 | |||
3342 | 116 | # compat15 removes this but we want to keep it | ||
3343 | 117 | @conf | ||
3344 | 118 | def undefine(self, key, from_env=True, comment=None): | ||
3345 | 119 | assert key and isinstance(key, str) | ||
3346 | 120 | |||
3347 | 121 | ban = key + '=' | ||
3348 | 122 | self.env.DEFINES = [x for x in self.env.DEFINES if not x.startswith(ban)] | ||
3349 | 123 | self.env.append_unique('define_key', key) | ||
3350 | 124 | # waf 1.5 | ||
3351 | 125 | if from_env: | ||
3352 | 126 | self.env[key] = () | ||
3353 | 127 | |||
3354 | 128 | class ConfigurationContext(Configure.ConfigurationContext): | ||
3355 | 129 | def init_dirs(self): | ||
3356 | 130 | self.setenv('default') | ||
3357 | 131 | self.env.merge_config_header = True | ||
3358 | 132 | return super(ConfigurationContext, self).init_dirs() | ||
3359 | 133 | |||
3360 | 134 | def find_program_samba(self, *k, **kw): | ||
3361 | 135 | kw['mandatory'] = False | ||
3362 | 136 | ret = self.find_program_old(*k, **kw) | ||
3363 | 137 | return ret | ||
3364 | 138 | Configure.ConfigurationContext.find_program_old = Configure.ConfigurationContext.find_program | ||
3365 | 139 | Configure.ConfigurationContext.find_program = find_program_samba | ||
3366 | 140 | |||
3367 | 141 | Build.BuildContext.ENFORCE_GROUP_ORDERING = Utils.nada | ||
3368 | 142 | Build.BuildContext.AUTOCLEANUP_STALE_FILES = Utils.nada | ||
3369 | 143 | |||
3370 | 144 | @conf | ||
3371 | 145 | def check(self, *k, **kw): | ||
3372 | 146 | '''Override the waf defaults to inject --with-directory options''' | ||
3373 | 147 | |||
3374 | 148 | # match the configuration test with speficic options, for example: | ||
3375 | 149 | # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv" | ||
3376 | 150 | self.validate_c(kw) | ||
3377 | 151 | |||
3378 | 152 | additional_dirs = [] | ||
3379 | 153 | if 'msg' in kw: | ||
3380 | 154 | msg = kw['msg'] | ||
3381 | 155 | for x in Options.OptionsContext.parser.parser.option_list: | ||
3382 | 156 | if getattr(x, 'match', None) and msg in x.match: | ||
3383 | 157 | d = getattr(Options.options, x.dest, '') | ||
3384 | 158 | if d: | ||
3385 | 159 | additional_dirs.append(d) | ||
3386 | 160 | |||
3387 | 161 | # we add the additional dirs twice: once for the test data, and again if the compilation test suceeds below | ||
3388 | 162 | def add_options_dir(dirs, env): | ||
3389 | 163 | for x in dirs: | ||
3390 | 164 | if not x in env.CPPPATH: | ||
3391 | 165 | env.CPPPATH = [os.path.join(x, 'include')] + env.CPPPATH | ||
3392 | 166 | if not x in env.LIBPATH: | ||
3393 | 167 | env.LIBPATH = [os.path.join(x, 'lib')] + env.LIBPATH | ||
3394 | 168 | |||
3395 | 169 | add_options_dir(additional_dirs, kw['env']) | ||
3396 | 170 | |||
3397 | 171 | self.start_msg(kw['msg'], **kw) | ||
3398 | 172 | ret = None | ||
3399 | 173 | try: | ||
3400 | 174 | ret = self.run_build(*k, **kw) | ||
3401 | 175 | except self.errors.ConfigurationError: | ||
3402 | 176 | self.end_msg(kw['errmsg'], 'YELLOW', **kw) | ||
3403 | 177 | if Logs.verbose > 1: | ||
3404 | 178 | raise | ||
3405 | 179 | else: | ||
3406 | 180 | self.fatal('The configuration failed') | ||
3407 | 181 | else: | ||
3408 | 182 | kw['success'] = ret | ||
3409 | 183 | # success! time for brandy | ||
3410 | 184 | add_options_dir(additional_dirs, self.env) | ||
3411 | 185 | |||
3412 | 186 | ret = self.post_check(*k, **kw) | ||
3413 | 187 | if not ret: | ||
3414 | 188 | self.end_msg(kw['errmsg'], 'YELLOW', **kw) | ||
3415 | 189 | self.fatal('The configuration failed %r' % ret) | ||
3416 | 190 | else: | ||
3417 | 191 | self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) | ||
3418 | 192 | return ret | ||
3419 | 193 | |||
3420 | 194 | @conf | ||
3421 | 195 | def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None): | ||
3422 | 196 | '''see if the platform supports building libraries''' | ||
3423 | 197 | |||
3424 | 198 | if msg is None: | ||
3425 | 199 | if rpath: | ||
3426 | 200 | msg = "rpath library support" | ||
3427 | 201 | else: | ||
3428 | 202 | msg = "building library support" | ||
3429 | 203 | |||
3430 | 204 | def build(bld): | ||
3431 | 205 | lib_node = bld.srcnode.make_node('libdir/liblc1.c') | ||
3432 | 206 | lib_node.parent.mkdir() | ||
3433 | 207 | lib_node.write('int lib_func(void) { return 42; }\n', 'w') | ||
3434 | 208 | main_node = bld.srcnode.make_node('main.c') | ||
3435 | 209 | main_node.write('int main(void) {return !(lib_func() == 42);}', 'w') | ||
3436 | 210 | linkflags = [] | ||
3437 | 211 | if version_script: | ||
3438 | 212 | script = bld.srcnode.make_node('ldscript') | ||
3439 | 213 | script.write('TEST_1.0A2 { global: *; };\n', 'w') | ||
3440 | 214 | linkflags.append('-Wl,--version-script=%s' % script.abspath()) | ||
3441 | 215 | bld(features='c cshlib', source=lib_node, target='lib1', linkflags=linkflags, name='lib1') | ||
3442 | 216 | o = bld(features='c cprogram', source=main_node, target='prog1', uselib_local='lib1') | ||
3443 | 217 | if rpath: | ||
3444 | 218 | o.rpath = [lib_node.parent.abspath()] | ||
3445 | 219 | def run_app(self): | ||
3446 | 220 | args = conf.SAMBA_CROSS_ARGS(msg=msg) | ||
3447 | 221 | env = dict(os.environ) | ||
3448 | 222 | env['LD_LIBRARY_PATH'] = self.inputs[0].parent.abspath() + os.pathsep + env.get('LD_LIBRARY_PATH', '') | ||
3449 | 223 | self.generator.bld.cmd_and_log([self.inputs[0].abspath()] + args, env=env) | ||
3450 | 224 | o.post() | ||
3451 | 225 | bld(rule=run_app, source=o.link_task.outputs[0]) | ||
3452 | 226 | |||
3453 | 227 | # ok, so it builds | ||
3454 | 228 | try: | ||
3455 | 229 | conf.check(build_fun=build, msg='Checking for %s' % msg) | ||
3456 | 230 | except conf.errors.ConfigurationError: | ||
3457 | 231 | return False | ||
3458 | 232 | return True | ||
3459 | 233 | |||
3460 | 234 | @conf | ||
3461 | 235 | def CHECK_NEED_LC(conf, msg): | ||
3462 | 236 | '''check if we need -lc''' | ||
3463 | 237 | def build(bld): | ||
3464 | 238 | lib_node = bld.srcnode.make_node('libdir/liblc1.c') | ||
3465 | 239 | lib_node.parent.mkdir() | ||
3466 | 240 | lib_node.write('#include <stdio.h>\nint lib_func(void) { FILE *f = fopen("foo", "r");}\n', 'w') | ||
3467 | 241 | bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc') | ||
3468 | 242 | try: | ||
3469 | 243 | conf.check(build_fun=build, msg=msg, okmsg='-lc is unnecessary', errmsg='-lc is necessary') | ||
3470 | 244 | except conf.errors.ConfigurationError: | ||
3471 | 245 | return False | ||
3472 | 246 | return True | ||
3473 | 247 | |||
3474 | 248 | # already implemented on "waf -v" | ||
3475 | 249 | def order(bld, tgt_list): | ||
3476 | 250 | return True | ||
3477 | 251 | Build.BuildContext.check_group_ordering = order | ||
3478 | 252 | |||
3479 | 253 | @conf | ||
3480 | 254 | def CHECK_CFG(self, *k, **kw): | ||
3481 | 255 | if 'args' in kw: | ||
3482 | 256 | kw['args'] = shlex.split(kw['args']) | ||
3483 | 257 | if not 'mandatory' in kw: | ||
3484 | 258 | kw['mandatory'] = False | ||
3485 | 259 | kw['global_define'] = True | ||
3486 | 260 | return self.check_cfg(*k, **kw) | ||
3487 | 261 | |||
3488 | 262 | def cmd_output(cmd, **kw): | ||
3489 | 263 | |||
3490 | 264 | silent = False | ||
3491 | 265 | if 'silent' in kw: | ||
3492 | 266 | silent = kw['silent'] | ||
3493 | 267 | del(kw['silent']) | ||
3494 | 268 | |||
3495 | 269 | if 'e' in kw: | ||
3496 | 270 | tmp = kw['e'] | ||
3497 | 271 | del(kw['e']) | ||
3498 | 272 | kw['env'] = tmp | ||
3499 | 273 | |||
3500 | 274 | kw['shell'] = isinstance(cmd, str) | ||
3501 | 275 | kw['stdout'] = Utils.subprocess.PIPE | ||
3502 | 276 | if silent: | ||
3503 | 277 | kw['stderr'] = Utils.subprocess.PIPE | ||
3504 | 278 | |||
3505 | 279 | try: | ||
3506 | 280 | p = Utils.subprocess.Popen(cmd, **kw) | ||
3507 | 281 | output = p.communicate()[0] | ||
3508 | 282 | except OSError as e: | ||
3509 | 283 | raise ValueError(str(e)) | ||
3510 | 284 | |||
3511 | 285 | if p.returncode: | ||
3512 | 286 | if not silent: | ||
3513 | 287 | msg = "command execution failed: %s -> %r" % (cmd, str(output)) | ||
3514 | 288 | raise ValueError(msg) | ||
3515 | 289 | output = '' | ||
3516 | 290 | return output | ||
3517 | 291 | Utils.cmd_output = cmd_output | ||
3518 | 292 | |||
3519 | 293 | |||
3520 | 294 | @TaskGen.feature('c', 'cxx', 'd') | ||
3521 | 295 | @TaskGen.before('apply_incpaths', 'propagate_uselib_vars') | ||
3522 | 296 | @TaskGen.after('apply_link', 'process_source') | ||
3523 | 297 | def apply_uselib_local(self): | ||
3524 | 298 | """ | ||
3525 | 299 | process the uselib_local attribute | ||
3526 | 300 | execute after apply_link because of the execution order set on 'link_task' | ||
3527 | 301 | """ | ||
3528 | 302 | env = self.env | ||
3529 | 303 | from waflib.Tools.ccroot import stlink_task | ||
3530 | 304 | |||
3531 | 305 | # 1. the case of the libs defined in the project (visit ancestors first) | ||
3532 | 306 | # the ancestors external libraries (uselib) will be prepended | ||
3533 | 307 | self.uselib = self.to_list(getattr(self, 'uselib', [])) | ||
3534 | 308 | self.includes = self.to_list(getattr(self, 'includes', [])) | ||
3535 | 309 | names = self.to_list(getattr(self, 'uselib_local', [])) | ||
3536 | 310 | get = self.bld.get_tgen_by_name | ||
3537 | 311 | seen = set() | ||
3538 | 312 | seen_uselib = set() | ||
3539 | 313 | tmp = Utils.deque(names) # consume a copy of the list of names | ||
3540 | 314 | if tmp: | ||
3541 | 315 | if Logs.verbose: | ||
3542 | 316 | Logs.warn('compat: "uselib_local" is deprecated, replace by "use"') | ||
3543 | 317 | while tmp: | ||
3544 | 318 | lib_name = tmp.popleft() | ||
3545 | 319 | # visit dependencies only once | ||
3546 | 320 | if lib_name in seen: | ||
3547 | 321 | continue | ||
3548 | 322 | |||
3549 | 323 | y = get(lib_name) | ||
3550 | 324 | y.post() | ||
3551 | 325 | seen.add(lib_name) | ||
3552 | 326 | |||
3553 | 327 | # object has ancestors to process (shared libraries): add them to the end of the list | ||
3554 | 328 | if getattr(y, 'uselib_local', None): | ||
3555 | 329 | for x in self.to_list(getattr(y, 'uselib_local', [])): | ||
3556 | 330 | obj = get(x) | ||
3557 | 331 | obj.post() | ||
3558 | 332 | if getattr(obj, 'link_task', None): | ||
3559 | 333 | if not isinstance(obj.link_task, stlink_task): | ||
3560 | 334 | tmp.append(x) | ||
3561 | 335 | |||
3562 | 336 | # link task and flags | ||
3563 | 337 | if getattr(y, 'link_task', None): | ||
3564 | 338 | |||
3565 | 339 | link_name = y.target[y.target.rfind(os.sep) + 1:] | ||
3566 | 340 | if isinstance(y.link_task, stlink_task): | ||
3567 | 341 | env.append_value('STLIB', [link_name]) | ||
3568 | 342 | else: | ||
3569 | 343 | # some linkers can link against programs | ||
3570 | 344 | env.append_value('LIB', [link_name]) | ||
3571 | 345 | |||
3572 | 346 | # the order | ||
3573 | 347 | self.link_task.set_run_after(y.link_task) | ||
3574 | 348 | |||
3575 | 349 | # for the recompilation | ||
3576 | 350 | self.link_task.dep_nodes += y.link_task.outputs | ||
3577 | 351 | |||
3578 | 352 | # add the link path too | ||
3579 | 353 | tmp_path = y.link_task.outputs[0].parent.bldpath() | ||
3580 | 354 | if not tmp_path in env['LIBPATH']: | ||
3581 | 355 | env.prepend_value('LIBPATH', [tmp_path]) | ||
3582 | 356 | |||
3583 | 357 | # add ancestors uselib too - but only propagate those that have no staticlib defined | ||
3584 | 358 | for v in self.to_list(getattr(y, 'uselib', [])): | ||
3585 | 359 | if v not in seen_uselib: | ||
3586 | 360 | seen_uselib.add(v) | ||
3587 | 361 | if not env['STLIB_' + v]: | ||
3588 | 362 | if not v in self.uselib: | ||
3589 | 363 | self.uselib.insert(0, v) | ||
3590 | 364 | |||
3591 | 365 | # if the library task generator provides 'export_includes', add to the include path | ||
3592 | 366 | # the export_includes must be a list of paths relative to the other library | ||
3593 | 367 | if getattr(y, 'export_includes', None): | ||
3594 | 368 | self.includes.extend(y.to_incnodes(y.export_includes)) | ||
3595 | 369 | |||
3596 | 370 | @TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib') | ||
3597 | 371 | @TaskGen.after('apply_link') | ||
3598 | 372 | def apply_objdeps(self): | ||
3599 | 373 | "add the .o files produced by some other object files in the same manner as uselib_local" | ||
3600 | 374 | names = getattr(self, 'add_objects', []) | ||
3601 | 375 | if not names: | ||
3602 | 376 | return | ||
3603 | 377 | names = self.to_list(names) | ||
3604 | 378 | |||
3605 | 379 | get = self.bld.get_tgen_by_name | ||
3606 | 380 | seen = [] | ||
3607 | 381 | while names: | ||
3608 | 382 | x = names[0] | ||
3609 | 383 | |||
3610 | 384 | # visit dependencies only once | ||
3611 | 385 | if x in seen: | ||
3612 | 386 | names = names[1:] | ||
3613 | 387 | continue | ||
3614 | 388 | |||
3615 | 389 | # object does not exist ? | ||
3616 | 390 | y = get(x) | ||
3617 | 391 | |||
3618 | 392 | # object has ancestors to process first ? update the list of names | ||
3619 | 393 | if getattr(y, 'add_objects', None): | ||
3620 | 394 | added = 0 | ||
3621 | 395 | lst = y.to_list(y.add_objects) | ||
3622 | 396 | lst.reverse() | ||
3623 | 397 | for u in lst: | ||
3624 | 398 | if u in seen: | ||
3625 | 399 | continue | ||
3626 | 400 | added = 1 | ||
3627 | 401 | names = [u]+names | ||
3628 | 402 | if added: | ||
3629 | 403 | continue # list of names modified, loop | ||
3630 | 404 | |||
3631 | 405 | # safe to process the current object | ||
3632 | 406 | y.post() | ||
3633 | 407 | seen.append(x) | ||
3634 | 408 | |||
3635 | 409 | for t in getattr(y, 'compiled_tasks', []): | ||
3636 | 410 | self.link_task.inputs.extend(t.outputs) | ||
3637 | 411 | |||
3638 | 412 | @TaskGen.after('apply_link') | ||
3639 | 413 | def process_obj_files(self): | ||
3640 | 414 | if not hasattr(self, 'obj_files'): | ||
3641 | 415 | return | ||
3642 | 416 | for x in self.obj_files: | ||
3643 | 417 | node = self.path.find_resource(x) | ||
3644 | 418 | self.link_task.inputs.append(node) | ||
3645 | 419 | |||
3646 | 420 | @TaskGen.taskgen_method | ||
3647 | 421 | def add_obj_file(self, file): | ||
3648 | 422 | """Small example on how to link object files as if they were source | ||
3649 | 423 | obj = bld.create_obj('cc') | ||
3650 | 424 | obj.add_obj_file('foo.o')""" | ||
3651 | 425 | if not hasattr(self, 'obj_files'): | ||
3652 | 426 | self.obj_files = [] | ||
3653 | 427 | if not 'process_obj_files' in self.meths: | ||
3654 | 428 | self.meths.append('process_obj_files') | ||
3655 | 429 | self.obj_files.append(file) | ||
3656 | diff --git a/buildtools/wafsamba/samba_wildcard.py b/buildtools/wafsamba/samba_wildcard.py | |||
3657 | index ed3e0c2..6173ce8 100644 | |||
3658 | --- a/buildtools/wafsamba/samba_wildcard.py | |||
3659 | +++ b/buildtools/wafsamba/samba_wildcard.py | |||
3660 | @@ -1,15 +1,15 @@ | |||
3661 | 1 | # based on playground/evil in the waf svn tree | 1 | # based on playground/evil in the waf svn tree |
3662 | 2 | 2 | ||
3663 | 3 | import os, datetime, fnmatch | 3 | import os, datetime, fnmatch |
3666 | 4 | import Scripting, Utils, Options, Logs, Environment | 4 | from waflib import Scripting, Utils, Options, Logs, Errors |
3667 | 5 | from Constants import SRCDIR, BLDDIR | 5 | from waflib import ConfigSet, Context |
3668 | 6 | from samba_utils import LOCAL_CACHE, os_path_relpath | 6 | from samba_utils import LOCAL_CACHE, os_path_relpath |
3669 | 7 | 7 | ||
3670 | 8 | def run_task(t, k): | 8 | def run_task(t, k): |
3671 | 9 | '''run a single build task''' | 9 | '''run a single build task''' |
3672 | 10 | ret = t.run() | 10 | ret = t.run() |
3673 | 11 | if ret: | 11 | if ret: |
3675 | 12 | raise Utils.WafError("Failed to build %s: %u" % (k, ret)) | 12 | raise Errors.WafError("Failed to build %s: %u" % (k, ret)) |
3676 | 13 | 13 | ||
3677 | 14 | 14 | ||
3678 | 15 | def run_named_build_task(cmd): | 15 | def run_named_build_task(cmd): |
3679 | @@ -45,7 +45,7 @@ def run_named_build_task(cmd): | |||
3680 | 45 | 45 | ||
3681 | 46 | 46 | ||
3682 | 47 | if not found: | 47 | if not found: |
3684 | 48 | raise Utils.WafError("Unable to find build target matching %s" % cmd) | 48 | raise Errors.WafError("Unable to find build target matching %s" % cmd) |
3685 | 49 | 49 | ||
3686 | 50 | 50 | ||
3687 | 51 | def rewrite_compile_targets(): | 51 | def rewrite_compile_targets(): |
3688 | @@ -125,7 +125,7 @@ def wildcard_main(missing_cmd_fn): | |||
3689 | 125 | def fake_build_environment(info=True, flush=False): | 125 | def fake_build_environment(info=True, flush=False): |
3690 | 126 | """create all the tasks for the project, but do not run the build | 126 | """create all the tasks for the project, but do not run the build |
3691 | 127 | return the build context in use""" | 127 | return the build context in use""" |
3693 | 128 | bld = getattr(Utils.g_module, 'build_context', Utils.Context)() | 128 | bld = getattr(Context.g_module, 'build_context', Utils.Context)() |
3694 | 129 | bld = Scripting.check_configured(bld) | 129 | bld = Scripting.check_configured(bld) |
3695 | 130 | 130 | ||
3696 | 131 | Options.commands['install'] = False | 131 | Options.commands['install'] = False |
3697 | @@ -134,16 +134,15 @@ def fake_build_environment(info=True, flush=False): | |||
3698 | 134 | bld.is_install = 0 # False | 134 | bld.is_install = 0 # False |
3699 | 135 | 135 | ||
3700 | 136 | try: | 136 | try: |
3702 | 137 | proj = Environment.Environment(Options.lockfile) | 137 | proj = ConfigSet.ConfigSet(Options.lockfile) |
3703 | 138 | except IOError: | 138 | except IOError: |
3705 | 139 | raise Utils.WafError("Project not configured (run 'waf configure' first)") | 139 | raise Errors.WafError("Project not configured (run 'waf configure' first)") |
3706 | 140 | 140 | ||
3707 | 141 | bld.load_dirs(proj[SRCDIR], proj[BLDDIR]) | ||
3708 | 142 | bld.load_envs() | 141 | bld.load_envs() |
3709 | 143 | 142 | ||
3710 | 144 | if info: | 143 | if info: |
3711 | 145 | Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath()) | 144 | Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath()) |
3713 | 146 | bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]]) | 145 | bld.add_subdirs([os.path.split(Context.g_module.root_path)[0]]) |
3714 | 147 | 146 | ||
3715 | 148 | bld.pre_build() | 147 | bld.pre_build() |
3716 | 149 | if flush: | 148 | if flush: |
3717 | diff --git a/buildtools/wafsamba/stale_files.py b/buildtools/wafsamba/stale_files.py | |||
3718 | index 2dd08e1..175f573 100644 | |||
3719 | --- a/buildtools/wafsamba/stale_files.py | |||
3720 | +++ b/buildtools/wafsamba/stale_files.py | |||
3721 | @@ -14,7 +14,9 @@ nodes/tasks, in which case the method will have to be modified | |||
3722 | 14 | to exclude some folders for example. | 14 | to exclude some folders for example. |
3723 | 15 | """ | 15 | """ |
3724 | 16 | 16 | ||
3726 | 17 | import Logs, Build, os, samba_utils, Options, Utils | 17 | from waflib import Logs, Build, Options, Utils, Errors |
3727 | 18 | import os | ||
3728 | 19 | from wafsamba import samba_utils | ||
3729 | 18 | from Runner import Parallel | 20 | from Runner import Parallel |
3730 | 19 | 21 | ||
3731 | 20 | old_refill_task_list = Parallel.refill_task_list | 22 | old_refill_task_list = Parallel.refill_task_list |
3732 | @@ -46,7 +48,7 @@ def replace_refill_task_list(self): | |||
3733 | 46 | 48 | ||
3734 | 47 | # paranoia | 49 | # paranoia |
3735 | 48 | if bin_base[-4:] != '/bin': | 50 | if bin_base[-4:] != '/bin': |
3737 | 49 | raise Utils.WafError("Invalid bin base: %s" % bin_base) | 51 | raise Errors.WafError("Invalid bin base: %s" % bin_base) |
3738 | 50 | 52 | ||
3739 | 51 | # obtain the expected list of files | 53 | # obtain the expected list of files |
3740 | 52 | expected = [] | 54 | expected = [] |
3741 | diff --git a/buildtools/wafsamba/symbols.py b/buildtools/wafsamba/symbols.py | |||
3742 | index 7ff4bac..3eca3d4 100644 | |||
3743 | --- a/buildtools/wafsamba/symbols.py | |||
3744 | +++ b/buildtools/wafsamba/symbols.py | |||
3745 | @@ -2,8 +2,8 @@ | |||
3746 | 2 | # using nm, producing a set of exposed defined/undefined symbols | 2 | # using nm, producing a set of exposed defined/undefined symbols |
3747 | 3 | 3 | ||
3748 | 4 | import os, re, subprocess | 4 | import os, re, subprocess |
3751 | 5 | import Utils, Build, Options, Logs | 5 | from waflib import Utils, Build, Options, Logs, Errors |
3752 | 6 | from Logs import debug | 6 | from waflib.Logs import debug |
3753 | 7 | from samba_utils import TO_LIST, LOCAL_CACHE, get_tgt_list, os_path_relpath | 7 | from samba_utils import TO_LIST, LOCAL_CACHE, get_tgt_list, os_path_relpath |
3754 | 8 | 8 | ||
3755 | 9 | # these are the data structures used in symbols.py: | 9 | # these are the data structures used in symbols.py: |
3756 | @@ -59,12 +59,12 @@ def symbols_extract(bld, objfiles, dynamic=False): | |||
3757 | 59 | 59 | ||
3758 | 60 | for line in nmpipe: | 60 | for line in nmpipe: |
3759 | 61 | line = line.strip() | 61 | line = line.strip() |
3761 | 62 | if line.endswith(':'): | 62 | if line.endswith(b':'): |
3762 | 63 | filename = line[:-1] | 63 | filename = line[:-1] |
3763 | 64 | ret[filename] = { "PUBLIC": set(), "UNDEFINED" : set() } | 64 | ret[filename] = { "PUBLIC": set(), "UNDEFINED" : set() } |
3764 | 65 | continue | 65 | continue |
3767 | 66 | cols = line.split(" ") | 66 | cols = line.split(b" ") |
3768 | 67 | if cols == ['']: | 67 | if cols == [b'']: |
3769 | 68 | continue | 68 | continue |
3770 | 69 | # see if the line starts with an address | 69 | # see if the line starts with an address |
3771 | 70 | if len(cols) == 3: | 70 | if len(cols) == 3: |
3772 | @@ -73,10 +73,10 @@ def symbols_extract(bld, objfiles, dynamic=False): | |||
3773 | 73 | else: | 73 | else: |
3774 | 74 | symbol_type = cols[0] | 74 | symbol_type = cols[0] |
3775 | 75 | symbol = cols[1] | 75 | symbol = cols[1] |
3777 | 76 | if symbol_type in "BDGTRVWSi": | 76 | if symbol_type in b"BDGTRVWSi": |
3778 | 77 | # its a public symbol | 77 | # its a public symbol |
3779 | 78 | ret[filename]["PUBLIC"].add(symbol) | 78 | ret[filename]["PUBLIC"].add(symbol) |
3781 | 79 | elif symbol_type in "U": | 79 | elif symbol_type in b"U": |
3782 | 80 | ret[filename]["UNDEFINED"].add(symbol) | 80 | ret[filename]["UNDEFINED"].add(symbol) |
3783 | 81 | 81 | ||
3784 | 82 | # add to the cache | 82 | # add to the cache |
3785 | @@ -106,10 +106,10 @@ def find_ldd_path(bld, libname, binary): | |||
3786 | 106 | lddpipe = subprocess.Popen(['ldd', binary], stdout=subprocess.PIPE).stdout | 106 | lddpipe = subprocess.Popen(['ldd', binary], stdout=subprocess.PIPE).stdout |
3787 | 107 | for line in lddpipe: | 107 | for line in lddpipe: |
3788 | 108 | line = line.strip() | 108 | line = line.strip() |
3791 | 109 | cols = line.split(" ") | 109 | cols = line.split(b" ") |
3792 | 110 | if len(cols) < 3 or cols[1] != "=>": | 110 | if len(cols) < 3 or cols[1] != b"=>": |
3793 | 111 | continue | 111 | continue |
3795 | 112 | if cols[0].startswith("libc."): | 112 | if cols[0].startswith(b"libc."): |
3796 | 113 | # save this one too | 113 | # save this one too |
3797 | 114 | bld.env.libc_path = cols[2] | 114 | bld.env.libc_path = cols[2] |
3798 | 115 | if cols[0].startswith(libname): | 115 | if cols[0].startswith(libname): |
3799 | @@ -119,8 +119,9 @@ def find_ldd_path(bld, libname, binary): | |||
3800 | 119 | 119 | ||
3801 | 120 | 120 | ||
3802 | 121 | # some regular expressions for parsing readelf output | 121 | # some regular expressions for parsing readelf output |
3805 | 122 | re_sharedlib = re.compile('Shared library: \[(.*)\]') | 122 | re_sharedlib = re.compile(b'Shared library: \[(.*)\]') |
3806 | 123 | re_rpath = re.compile('Library rpath: \[(.*)\]') | 123 | # output from readelf could be `Library rpath` or `Libray runpath` |
3807 | 124 | re_rpath = re.compile(b'Library (rpath|runpath): \[(.*)\]') | ||
3808 | 124 | 125 | ||
3809 | 125 | def get_libs(bld, binname): | 126 | def get_libs(bld, binname): |
3810 | 126 | '''find the list of linked libraries for any binary or library | 127 | '''find the list of linked libraries for any binary or library |
3811 | @@ -146,7 +147,8 @@ def get_libs(bld, binname): | |||
3812 | 146 | libs.add(m.group(1)) | 147 | libs.add(m.group(1)) |
3813 | 147 | m = re_rpath.search(line) | 148 | m = re_rpath.search(line) |
3814 | 148 | if m: | 149 | if m: |
3816 | 149 | rpath.extend(m.group(1).split(":")) | 150 | # output from Popen is always bytestr even in py3 |
3817 | 151 | rpath.extend(m.group(2).split(b":")) | ||
3818 | 150 | 152 | ||
3819 | 151 | ret = set() | 153 | ret = set() |
3820 | 152 | for lib in libs: | 154 | for lib in libs: |
3821 | @@ -410,7 +412,7 @@ def check_library_deps(bld, t): | |||
3822 | 410 | if dep2 == name and t.in_library != t2.in_library: | 412 | if dep2 == name and t.in_library != t2.in_library: |
3823 | 411 | Logs.warn("WARNING: mutual dependency %s <=> %s" % (name, real_name(t2.sname))) | 413 | Logs.warn("WARNING: mutual dependency %s <=> %s" % (name, real_name(t2.sname))) |
3824 | 412 | Logs.warn("Libraries should match. %s != %s" % (t.in_library, t2.in_library)) | 414 | Logs.warn("Libraries should match. %s != %s" % (t.in_library, t2.in_library)) |
3826 | 413 | # raise Utils.WafError("illegal mutual dependency") | 415 | # raise Errors.WafError("illegal mutual dependency") |
3827 | 414 | 416 | ||
3828 | 415 | 417 | ||
3829 | 416 | def check_syslib_collisions(bld, tgt_list): | 418 | def check_syslib_collisions(bld, tgt_list): |
3830 | @@ -430,7 +432,7 @@ def check_syslib_collisions(bld, tgt_list): | |||
3831 | 430 | Logs.error("ERROR: Target '%s' has symbols '%s' which is also in syslib '%s'" % (t.sname, common, lib)) | 432 | Logs.error("ERROR: Target '%s' has symbols '%s' which is also in syslib '%s'" % (t.sname, common, lib)) |
3832 | 431 | has_error = True | 433 | has_error = True |
3833 | 432 | if has_error: | 434 | if has_error: |
3835 | 433 | raise Utils.WafError("symbols in common with system libraries") | 435 | raise Errors.WafError("symbols in common with system libraries") |
3836 | 434 | 436 | ||
3837 | 435 | 437 | ||
3838 | 436 | def check_dependencies(bld, t): | 438 | def check_dependencies(bld, t): |
3839 | @@ -546,7 +548,7 @@ def symbols_whyneeded(task): | |||
3840 | 546 | 548 | ||
3841 | 547 | why = Options.options.WHYNEEDED.split(":") | 549 | why = Options.options.WHYNEEDED.split(":") |
3842 | 548 | if len(why) != 2: | 550 | if len(why) != 2: |
3844 | 549 | raise Utils.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY") | 551 | raise Errors.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY") |
3845 | 550 | target = why[0] | 552 | target = why[0] |
3846 | 551 | subsystem = why[1] | 553 | subsystem = why[1] |
3847 | 552 | 554 | ||
3848 | @@ -579,7 +581,7 @@ def report_duplicate(bld, binname, sym, libs, fail_on_error): | |||
3849 | 579 | else: | 581 | else: |
3850 | 580 | libnames.append(lib) | 582 | libnames.append(lib) |
3851 | 581 | if fail_on_error: | 583 | if fail_on_error: |
3853 | 582 | raise Utils.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames)) | 584 | raise Errors.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames)) |
3854 | 583 | else: | 585 | else: |
3855 | 584 | print("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames)) | 586 | print("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames)) |
3856 | 585 | 587 | ||
3857 | diff --git a/buildtools/wafsamba/test_duplicate_symbol.sh b/buildtools/wafsamba/test_duplicate_symbol.sh | |||
3858 | index 89a4027..46f44a6 100755 | |||
3859 | --- a/buildtools/wafsamba/test_duplicate_symbol.sh | |||
3860 | +++ b/buildtools/wafsamba/test_duplicate_symbol.sh | |||
3861 | @@ -5,7 +5,7 @@ | |||
3862 | 5 | 5 | ||
3863 | 6 | subunit_start_test duplicate_symbols | 6 | subunit_start_test duplicate_symbols |
3864 | 7 | 7 | ||
3866 | 8 | if ./buildtools/bin/waf build --dup-symbol-check; then | 8 | if $PYTHON ./buildtools/bin/waf build --dup-symbol-check; then |
3867 | 9 | subunit_pass_test duplicate_symbols | 9 | subunit_pass_test duplicate_symbols |
3868 | 10 | else | 10 | else |
3869 | 11 | echo | subunit_fail_test duplicate_symbols | 11 | echo | subunit_fail_test duplicate_symbols |
3870 | diff --git a/buildtools/wafsamba/tests/test_abi.py b/buildtools/wafsamba/tests/test_abi.py | |||
3871 | index bba78c1..d6bdb04 100644 | |||
3872 | --- a/buildtools/wafsamba/tests/test_abi.py | |||
3873 | +++ b/buildtools/wafsamba/tests/test_abi.py | |||
3874 | @@ -21,7 +21,7 @@ from wafsamba.samba_abi import ( | |||
3875 | 21 | normalise_signature, | 21 | normalise_signature, |
3876 | 22 | ) | 22 | ) |
3877 | 23 | 23 | ||
3879 | 24 | from cStringIO import StringIO | 24 | from samba.compat import StringIO |
3880 | 25 | 25 | ||
3881 | 26 | 26 | ||
3882 | 27 | class NormaliseSignatureTests(TestCase): | 27 | class NormaliseSignatureTests(TestCase): |
3883 | @@ -66,6 +66,10 @@ class WriteVscriptTests(TestCase): | |||
3884 | 66 | 1.0 { | 66 | 1.0 { |
3885 | 67 | \tglobal: | 67 | \tglobal: |
3886 | 68 | \t\t*; | 68 | \t\t*; |
3887 | 69 | \tlocal: | ||
3888 | 70 | \t\t_end; | ||
3889 | 71 | \t\t__bss_start; | ||
3890 | 72 | \t\t_edata; | ||
3891 | 69 | }; | 73 | }; |
3892 | 70 | """) | 74 | """) |
3893 | 71 | 75 | ||
3894 | @@ -84,6 +88,10 @@ MYLIB_0.1 { | |||
3895 | 84 | 1.0 { | 88 | 1.0 { |
3896 | 85 | \tglobal: | 89 | \tglobal: |
3897 | 86 | \t\t*; | 90 | \t\t*; |
3898 | 91 | \tlocal: | ||
3899 | 92 | \t\t_end; | ||
3900 | 93 | \t\t__bss_start; | ||
3901 | 94 | \t\t_edata; | ||
3902 | 87 | }; | 95 | }; |
3903 | 88 | """) | 96 | """) |
3904 | 89 | 97 | ||
3905 | @@ -99,6 +107,9 @@ MYLIB_0.1 { | |||
3906 | 99 | \t\t*; | 107 | \t\t*; |
3907 | 100 | \tlocal: | 108 | \tlocal: |
3908 | 101 | \t\texc_*; | 109 | \t\texc_*; |
3909 | 110 | \t\t_end; | ||
3910 | 111 | \t\t__bss_start; | ||
3911 | 112 | \t\t_edata; | ||
3912 | 102 | }; | 113 | }; |
3913 | 103 | """) | 114 | """) |
3914 | 104 | 115 | ||
3915 | @@ -115,6 +126,9 @@ MYLIB_0.1 { | |||
3916 | 115 | \t\tpub_*; | 126 | \t\tpub_*; |
3917 | 116 | \tlocal: | 127 | \tlocal: |
3918 | 117 | \t\texc_*; | 128 | \t\texc_*; |
3919 | 129 | \t\t_end; | ||
3920 | 130 | \t\t__bss_start; | ||
3921 | 131 | \t\t_edata; | ||
3922 | 118 | \t\t*; | 132 | \t\t*; |
3923 | 119 | }; | 133 | }; |
3924 | 120 | """) | 134 | """) |
3925 | diff --git a/buildtools/wafsamba/tru64cc.py b/buildtools/wafsamba/tru64cc.py | |||
3926 | 121 | deleted file mode 100644 | 135 | deleted file mode 100644 |
3927 | index e1bbb1d..0000000 | |||
3928 | --- a/buildtools/wafsamba/tru64cc.py | |||
3929 | +++ /dev/null | |||
3930 | @@ -1,77 +0,0 @@ | |||
3931 | 1 | |||
3932 | 2 | # compiler definition for tru64/OSF1 cc compiler | ||
3933 | 3 | # based on suncc.py from waf | ||
3934 | 4 | |||
3935 | 5 | import os, optparse | ||
3936 | 6 | import Utils, Options, Configure | ||
3937 | 7 | import ccroot, ar | ||
3938 | 8 | from Configure import conftest | ||
3939 | 9 | |||
3940 | 10 | from compiler_cc import c_compiler | ||
3941 | 11 | |||
3942 | 12 | c_compiler['osf1V'] = ['gcc', 'tru64cc'] | ||
3943 | 13 | |||
3944 | 14 | @conftest | ||
3945 | 15 | def find_tru64cc(conf): | ||
3946 | 16 | v = conf.env | ||
3947 | 17 | cc = None | ||
3948 | 18 | if v['CC']: cc = v['CC'] | ||
3949 | 19 | elif 'CC' in conf.environ: cc = conf.environ['CC'] | ||
3950 | 20 | if not cc: cc = conf.find_program('cc', var='CC') | ||
3951 | 21 | if not cc: conf.fatal('tru64cc was not found') | ||
3952 | 22 | cc = conf.cmd_to_list(cc) | ||
3953 | 23 | |||
3954 | 24 | try: | ||
3955 | 25 | if not Utils.cmd_output(cc + ['-V']): | ||
3956 | 26 | conf.fatal('tru64cc %r was not found' % cc) | ||
3957 | 27 | except ValueError: | ||
3958 | 28 | conf.fatal('tru64cc -V could not be executed') | ||
3959 | 29 | |||
3960 | 30 | v['CC'] = cc | ||
3961 | 31 | v['CC_NAME'] = 'tru64' | ||
3962 | 32 | |||
3963 | 33 | @conftest | ||
3964 | 34 | def tru64cc_common_flags(conf): | ||
3965 | 35 | v = conf.env | ||
3966 | 36 | |||
3967 | 37 | v['CC_SRC_F'] = '' | ||
3968 | 38 | v['CC_TGT_F'] = ['-c', '-o', ''] | ||
3969 | 39 | v['CPPPATH_ST'] = '-I%s' # template for adding include paths | ||
3970 | 40 | |||
3971 | 41 | # linker | ||
3972 | 42 | if not v['LINK_CC']: v['LINK_CC'] = v['CC'] | ||
3973 | 43 | v['CCLNK_SRC_F'] = '' | ||
3974 | 44 | v['CCLNK_TGT_F'] = ['-o', ''] | ||
3975 | 45 | |||
3976 | 46 | v['LIB_ST'] = '-l%s' # template for adding libs | ||
3977 | 47 | v['LIBPATH_ST'] = '-L%s' # template for adding libpaths | ||
3978 | 48 | v['STATICLIB_ST'] = '-l%s' | ||
3979 | 49 | v['STATICLIBPATH_ST'] = '-L%s' | ||
3980 | 50 | v['CCDEFINES_ST'] = '-D%s' | ||
3981 | 51 | |||
3982 | 52 | # v['SONAME_ST'] = '-Wl,-h -Wl,%s' | ||
3983 | 53 | # v['SHLIB_MARKER'] = '-Bdynamic' | ||
3984 | 54 | # v['STATICLIB_MARKER'] = '-Bstatic' | ||
3985 | 55 | |||
3986 | 56 | # program | ||
3987 | 57 | v['program_PATTERN'] = '%s' | ||
3988 | 58 | |||
3989 | 59 | # shared library | ||
3990 | 60 | # v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] | ||
3991 | 61 | v['shlib_LINKFLAGS'] = ['-shared'] | ||
3992 | 62 | v['shlib_PATTERN'] = 'lib%s.so' | ||
3993 | 63 | |||
3994 | 64 | # static lib | ||
3995 | 65 | # v['staticlib_LINKFLAGS'] = ['-Bstatic'] | ||
3996 | 66 | # v['staticlib_PATTERN'] = 'lib%s.a' | ||
3997 | 67 | |||
3998 | 68 | detect = ''' | ||
3999 | 69 | find_tru64cc | ||
4000 | 70 | find_cpp | ||
4001 | 71 | find_ar | ||
4002 | 72 | tru64cc_common_flags | ||
4003 | 73 | cc_load_tools | ||
4004 | 74 | cc_add_flags | ||
4005 | 75 | link_add_flags | ||
4006 | 76 | ''' | ||
4007 | 77 | |||
4008 | diff --git a/buildtools/wafsamba/wafsamba.py b/buildtools/wafsamba/wafsamba.py | |||
4009 | index 12d5421..1b98e1c 100644 | |||
4010 | --- a/buildtools/wafsamba/wafsamba.py | |||
4011 | +++ b/buildtools/wafsamba/wafsamba.py | |||
4012 | @@ -1,15 +1,16 @@ | |||
4013 | 1 | # a waf tool to add autoconf-like macros to the configure section | 1 | # a waf tool to add autoconf-like macros to the configure section |
4014 | 2 | # and for SAMBA_ macros for building libraries, binaries etc | 2 | # and for SAMBA_ macros for building libraries, binaries etc |
4015 | 3 | 3 | ||
4019 | 4 | import Build, os, sys, Options, Task, Utils, cc, TaskGen, fnmatch, re, shutil, Logs, Constants | 4 | import os, sys, re, shutil, fnmatch |
4020 | 5 | from Configure import conf | 5 | from waflib import Build, Options, Task, Utils, TaskGen, Logs, Context, Errors |
4021 | 6 | from Logs import debug | 6 | from waflib.Configure import conf |
4022 | 7 | from waflib.Logs import debug | ||
4023 | 7 | from samba_utils import SUBST_VARS_RECURSIVE | 8 | from samba_utils import SUBST_VARS_RECURSIVE |
4024 | 8 | TaskGen.task_gen.apply_verif = Utils.nada | 9 | TaskGen.task_gen.apply_verif = Utils.nada |
4025 | 9 | 10 | ||
4026 | 10 | # bring in the other samba modules | 11 | # bring in the other samba modules |
4027 | 11 | from samba_optimisation import * | ||
4028 | 12 | from samba_utils import * | 12 | from samba_utils import * |
4029 | 13 | from samba_utils import symlink | ||
4030 | 13 | from samba_version import * | 14 | from samba_version import * |
4031 | 14 | from samba_autoconf import * | 15 | from samba_autoconf import * |
4032 | 15 | from samba_patterns import * | 16 | from samba_patterns import * |
4033 | @@ -25,27 +26,19 @@ import samba_install | |||
4034 | 25 | import samba_conftests | 26 | import samba_conftests |
4035 | 26 | import samba_abi | 27 | import samba_abi |
4036 | 27 | import samba_headers | 28 | import samba_headers |
4037 | 28 | import tru64cc | ||
4038 | 29 | import irixcc | ||
4039 | 30 | import hpuxcc | ||
4040 | 31 | import generic_cc | 29 | import generic_cc |
4041 | 32 | import samba_dist | 30 | import samba_dist |
4042 | 33 | import samba_wildcard | 31 | import samba_wildcard |
4043 | 34 | import stale_files | ||
4044 | 35 | import symbols | 32 | import symbols |
4045 | 36 | import pkgconfig | 33 | import pkgconfig |
4046 | 37 | import configure_file | 34 | import configure_file |
4051 | 38 | 35 | import samba_waf18 | |
4048 | 39 | # some systems have broken threading in python | ||
4049 | 40 | if os.environ.get('WAF_NOTHREADS') == '1': | ||
4050 | 41 | import nothreads | ||
4052 | 42 | 36 | ||
4053 | 43 | LIB_PATH="shared" | 37 | LIB_PATH="shared" |
4054 | 44 | 38 | ||
4055 | 45 | os.environ['PYTHONUNBUFFERED'] = '1' | 39 | os.environ['PYTHONUNBUFFERED'] = '1' |
4056 | 46 | 40 | ||
4059 | 47 | 41 | if Context.HEXVERSION not in (0x2000800,): | |
4058 | 48 | if Constants.HEXVERSION < 0x105019: | ||
4060 | 49 | Logs.error(''' | 42 | Logs.error(''' |
4061 | 50 | Please use the version of waf that comes with Samba, not | 43 | Please use the version of waf that comes with Samba, not |
4062 | 51 | a system installed version. See http://wiki.samba.org/index.php/Waf | 44 | a system installed version. See http://wiki.samba.org/index.php/Waf |
4063 | @@ -55,26 +48,25 @@ Alternatively, please run ./configure and make as usual. That will | |||
4064 | 55 | call the right version of waf.''') | 48 | call the right version of waf.''') |
4065 | 56 | sys.exit(1) | 49 | sys.exit(1) |
4066 | 57 | 50 | ||
4067 | 58 | |||
4068 | 59 | @conf | 51 | @conf |
4069 | 60 | def SAMBA_BUILD_ENV(conf): | 52 | def SAMBA_BUILD_ENV(conf): |
4070 | 61 | '''create the samba build environment''' | 53 | '''create the samba build environment''' |
4076 | 62 | conf.env.BUILD_DIRECTORY = conf.blddir | 54 | conf.env.BUILD_DIRECTORY = conf.bldnode.abspath() |
4077 | 63 | mkdir_p(os.path.join(conf.blddir, LIB_PATH)) | 55 | mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH)) |
4078 | 64 | mkdir_p(os.path.join(conf.blddir, LIB_PATH, "private")) | 56 | mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH, "private")) |
4079 | 65 | mkdir_p(os.path.join(conf.blddir, "modules")) | 57 | mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, "modules")) |
4080 | 66 | mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc')) | 58 | mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'python/samba/dcerpc')) |
4081 | 67 | # this allows all of the bin/shared and bin/python targets | 59 | # this allows all of the bin/shared and bin/python targets |
4082 | 68 | # to be expressed in terms of build directory paths | 60 | # to be expressed in terms of build directory paths |
4086 | 69 | mkdir_p(os.path.join(conf.blddir, 'default')) | 61 | mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'default')) |
4087 | 70 | for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('python', 'python_modules')]: | 62 | for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('python', 'python')]: |
4088 | 71 | link_target = os.path.join(conf.blddir, 'default/' + target) | 63 | link_target = os.path.join(conf.env.BUILD_DIRECTORY, 'default/' + target) |
4089 | 72 | if not os.path.lexists(link_target): | 64 | if not os.path.lexists(link_target): |
4091 | 73 | os.symlink('../' + source, link_target) | 65 | symlink('../' + source, link_target) |
4092 | 74 | 66 | ||
4093 | 75 | # get perl to put the blib files in the build directory | 67 | # get perl to put the blib files in the build directory |
4096 | 76 | blib_bld = os.path.join(conf.blddir, 'default/pidl/blib') | 68 | blib_bld = os.path.join(conf.env.BUILD_DIRECTORY, 'default/pidl/blib') |
4097 | 77 | blib_src = os.path.join(conf.srcdir, 'pidl/blib') | 69 | blib_src = os.path.join(conf.srcnode.abspath(), 'pidl/blib') |
4098 | 78 | mkdir_p(blib_bld + '/man1') | 70 | mkdir_p(blib_bld + '/man1') |
4099 | 79 | mkdir_p(blib_bld + '/man3') | 71 | mkdir_p(blib_bld + '/man3') |
4100 | 80 | if os.path.islink(blib_src): | 72 | if os.path.islink(blib_src): |
4101 | @@ -148,7 +140,7 @@ def SAMBA_LIBRARY(bld, libname, source, | |||
4102 | 148 | public_headers = None | 140 | public_headers = None |
4103 | 149 | 141 | ||
4104 | 150 | if private_library and public_headers: | 142 | if private_library and public_headers: |
4106 | 151 | raise Utils.WafError("private library '%s' must not have public header files" % | 143 | raise Errors.WafError("private library '%s' must not have public header files" % |
4107 | 152 | libname) | 144 | libname) |
4108 | 153 | 145 | ||
4109 | 154 | if LIB_MUST_BE_PRIVATE(bld, libname): | 146 | if LIB_MUST_BE_PRIVATE(bld, libname): |
4110 | @@ -225,13 +217,13 @@ def SAMBA_LIBRARY(bld, libname, source, | |||
4111 | 225 | # we don't want any public libraries without version numbers | 217 | # we don't want any public libraries without version numbers |
4112 | 226 | if (not private_library and target_type != 'PYTHON' and not realname): | 218 | if (not private_library and target_type != 'PYTHON' and not realname): |
4113 | 227 | if vnum is None and soname is None: | 219 | if vnum is None and soname is None: |
4115 | 228 | raise Utils.WafError("public library '%s' must have a vnum" % | 220 | raise Errors.WafError("public library '%s' must have a vnum" % |
4116 | 229 | libname) | 221 | libname) |
4117 | 230 | if pc_files is None: | 222 | if pc_files is None: |
4119 | 231 | raise Utils.WafError("public library '%s' must have pkg-config file" % | 223 | raise Errors.WafError("public library '%s' must have pkg-config file" % |
4120 | 232 | libname) | 224 | libname) |
4121 | 233 | if public_headers is None and not bld.env['IS_EXTRA_PYTHON']: | 225 | if public_headers is None and not bld.env['IS_EXTRA_PYTHON']: |
4123 | 234 | raise Utils.WafError("public library '%s' must have header files" % | 226 | raise Errors.WafError("public library '%s' must have header files" % |
4124 | 235 | libname) | 227 | libname) |
4125 | 236 | 228 | ||
4126 | 237 | if bundled_name is not None: | 229 | if bundled_name is not None: |
4127 | @@ -273,7 +265,7 @@ def SAMBA_LIBRARY(bld, libname, source, | |||
4128 | 273 | vscript = None | 265 | vscript = None |
4129 | 274 | if bld.env.HAVE_LD_VERSION_SCRIPT: | 266 | if bld.env.HAVE_LD_VERSION_SCRIPT: |
4130 | 275 | if private_library: | 267 | if private_library: |
4132 | 276 | version = "%s_%s" % (Utils.g_module.APPNAME, Utils.g_module.VERSION) | 268 | version = "%s_%s" % (Context.g_module.APPNAME, Context.g_module.VERSION) |
4133 | 277 | elif vnum: | 269 | elif vnum: |
4134 | 278 | version = "%s_%s" % (libname, vnum) | 270 | version = "%s_%s" % (libname, vnum) |
4135 | 279 | else: | 271 | else: |
4136 | @@ -282,17 +274,17 @@ def SAMBA_LIBRARY(bld, libname, source, | |||
4137 | 282 | vscript = "%s.vscript" % libname | 274 | vscript = "%s.vscript" % libname |
4138 | 283 | bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript, | 275 | bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript, |
4139 | 284 | abi_match) | 276 | abi_match) |
4141 | 285 | fullname = apply_pattern(bundled_name, bld.env.shlib_PATTERN) | 277 | fullname = apply_pattern(bundled_name, bld.env.cshlib_PATTERN) |
4142 | 286 | fullpath = bld.path.find_or_declare(fullname) | 278 | fullpath = bld.path.find_or_declare(fullname) |
4143 | 287 | vscriptpath = bld.path.find_or_declare(vscript) | 279 | vscriptpath = bld.path.find_or_declare(vscript) |
4144 | 288 | if not fullpath: | 280 | if not fullpath: |
4146 | 289 | raise Utils.WafError("unable to find fullpath for %s" % fullname) | 281 | raise Errors.WafError("unable to find fullpath for %s" % fullname) |
4147 | 290 | if not vscriptpath: | 282 | if not vscriptpath: |
4149 | 291 | raise Utils.WafError("unable to find vscript path for %s" % vscript) | 283 | raise Errors.WafError("unable to find vscript path for %s" % vscript) |
4150 | 292 | bld.add_manual_dependency(fullpath, vscriptpath) | 284 | bld.add_manual_dependency(fullpath, vscriptpath) |
4151 | 293 | if bld.is_install: | 285 | if bld.is_install: |
4152 | 294 | # also make the .inst file depend on the vscript | 286 | # also make the .inst file depend on the vscript |
4154 | 295 | instname = apply_pattern(bundled_name + '.inst', bld.env.shlib_PATTERN) | 287 | instname = apply_pattern(bundled_name + '.inst', bld.env.cshlib_PATTERN) |
4155 | 296 | bld.add_manual_dependency(bld.path.find_or_declare(instname), bld.path.find_or_declare(vscript)) | 288 | bld.add_manual_dependency(bld.path.find_or_declare(instname), bld.path.find_or_declare(vscript)) |
4156 | 297 | vscript = os.path.join(bld.path.abspath(bld.env), vscript) | 289 | vscript = os.path.join(bld.path.abspath(bld.env), vscript) |
4157 | 298 | 290 | ||
4158 | @@ -327,10 +319,12 @@ def SAMBA_LIBRARY(bld, libname, source, | |||
4159 | 327 | link_name = 'shared/%s' % realname | 319 | link_name = 'shared/%s' % realname |
4160 | 328 | 320 | ||
4161 | 329 | if link_name: | 321 | if link_name: |
4162 | 322 | if 'waflib.extras.compat15' in sys.modules: | ||
4163 | 323 | link_name = 'default/' + link_name | ||
4164 | 330 | t.link_name = link_name | 324 | t.link_name = link_name |
4165 | 331 | 325 | ||
4166 | 332 | if pc_files is not None and not private_library: | 326 | if pc_files is not None and not private_library: |
4168 | 333 | if pyembed and bld.env['IS_EXTRA_PYTHON']: | 327 | if pyembed: |
4169 | 334 | bld.PKG_CONFIG_FILES(pc_files, vnum=vnum, extra_name=bld.env['PYTHON_SO_ABI_FLAG']) | 328 | bld.PKG_CONFIG_FILES(pc_files, vnum=vnum, extra_name=bld.env['PYTHON_SO_ABI_FLAG']) |
4170 | 335 | else: | 329 | else: |
4171 | 336 | bld.PKG_CONFIG_FILES(pc_files, vnum=vnum) | 330 | bld.PKG_CONFIG_FILES(pc_files, vnum=vnum) |
4172 | @@ -674,7 +668,7 @@ def SAMBA_GENERATOR(bld, name, rule, source='', target='', | |||
4173 | 674 | target=target, | 668 | target=target, |
4174 | 675 | shell=isinstance(rule, str), | 669 | shell=isinstance(rule, str), |
4175 | 676 | update_outputs=True, | 670 | update_outputs=True, |
4177 | 677 | before='cc', | 671 | before='c', |
4178 | 678 | ext_out='.c', | 672 | ext_out='.c', |
4179 | 679 | samba_type='GENERATOR', | 673 | samba_type='GENERATOR', |
4180 | 680 | dep_vars = dep_vars, | 674 | dep_vars = dep_vars, |
4181 | @@ -728,22 +722,6 @@ Build.BuildContext.SET_BUILD_GROUP = SET_BUILD_GROUP | |||
4182 | 728 | 722 | ||
4183 | 729 | 723 | ||
4184 | 730 | 724 | ||
4185 | 731 | @conf | ||
4186 | 732 | def ENABLE_TIMESTAMP_DEPENDENCIES(conf): | ||
4187 | 733 | """use timestamps instead of file contents for deps | ||
4188 | 734 | this currently doesn't work""" | ||
4189 | 735 | def h_file(filename): | ||
4190 | 736 | import stat | ||
4191 | 737 | st = os.stat(filename) | ||
4192 | 738 | if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file') | ||
4193 | 739 | m = Utils.md5() | ||
4194 | 740 | m.update(str(st.st_mtime)) | ||
4195 | 741 | m.update(str(st.st_size)) | ||
4196 | 742 | m.update(filename) | ||
4197 | 743 | return m.digest() | ||
4198 | 744 | Utils.h_file = h_file | ||
4199 | 745 | |||
4200 | 746 | |||
4201 | 747 | def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None): | 725 | def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None): |
4202 | 748 | '''used to copy scripts from the source tree into the build directory | 726 | '''used to copy scripts from the source tree into the build directory |
4203 | 749 | for use by selftest''' | 727 | for use by selftest''' |
4204 | @@ -758,14 +736,14 @@ def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None): | |||
4205 | 758 | target = os.path.join(installdir, iname) | 736 | target = os.path.join(installdir, iname) |
4206 | 759 | tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target)) | 737 | tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target)) |
4207 | 760 | mkdir_p(tgtdir) | 738 | mkdir_p(tgtdir) |
4209 | 761 | link_src = os.path.normpath(os.path.join(bld.curdir, s)) | 739 | link_src = os.path.normpath(os.path.join(bld.path.abspath(), s)) |
4210 | 762 | link_dst = os.path.join(tgtdir, os.path.basename(iname)) | 740 | link_dst = os.path.join(tgtdir, os.path.basename(iname)) |
4211 | 763 | if os.path.islink(link_dst) and os.readlink(link_dst) == link_src: | 741 | if os.path.islink(link_dst) and os.readlink(link_dst) == link_src: |
4212 | 764 | continue | 742 | continue |
4214 | 765 | if os.path.exists(link_dst): | 743 | if os.path.islink(link_dst): |
4215 | 766 | os.unlink(link_dst) | 744 | os.unlink(link_dst) |
4216 | 767 | Logs.info("symlink: %s -> %s/%s" % (s, installdir, iname)) | 745 | Logs.info("symlink: %s -> %s/%s" % (s, installdir, iname)) |
4218 | 768 | os.symlink(link_src, link_dst) | 746 | symlink(link_src, link_dst) |
4219 | 769 | Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT | 747 | Build.BuildContext.SAMBA_SCRIPT = SAMBA_SCRIPT |
4220 | 770 | 748 | ||
4221 | 771 | 749 | ||
4222 | @@ -779,10 +757,10 @@ def copy_and_fix_python_path(task): | |||
4223 | 779 | replacement="""sys.path.insert(0, "%s") | 757 | replacement="""sys.path.insert(0, "%s") |
4224 | 780 | sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"]) | 758 | sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"]) |
4225 | 781 | 759 | ||
4228 | 782 | if task.env["PYTHON"][0] == "/": | 760 | if task.env["PYTHON"][0].startswith("/"): |
4229 | 783 | replacement_shebang = "#!%s\n" % task.env["PYTHON"] | 761 | replacement_shebang = "#!%s\n" % task.env["PYTHON"][0] |
4230 | 784 | else: | 762 | else: |
4232 | 785 | replacement_shebang = "#!/usr/bin/env %s\n" % task.env["PYTHON"] | 763 | replacement_shebang = "#!/usr/bin/env %s\n" % task.env["PYTHON"][0] |
4233 | 786 | 764 | ||
4234 | 787 | installed_location=task.outputs[0].bldpath(task.env) | 765 | installed_location=task.outputs[0].bldpath(task.env) |
4235 | 788 | source_file = open(task.inputs[0].srcpath(task.env)) | 766 | source_file = open(task.inputs[0].srcpath(task.env)) |
4236 | @@ -790,7 +768,7 @@ sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"]) | |||
4237 | 790 | lineno = 0 | 768 | lineno = 0 |
4238 | 791 | for line in source_file: | 769 | for line in source_file: |
4239 | 792 | newline = line | 770 | newline = line |
4241 | 793 | if (lineno == 0 and task.env["PYTHON_SPECIFIED"] is True and | 771 | if (lineno == 0 and |
4242 | 794 | line[:2] == "#!"): | 772 | line[:2] == "#!"): |
4243 | 795 | newline = replacement_shebang | 773 | newline = replacement_shebang |
4244 | 796 | elif pattern in line: | 774 | elif pattern in line: |
4245 | @@ -798,7 +776,7 @@ sys.path.insert(1, "%s")""" % (task.env["PYTHONARCHDIR"], task.env["PYTHONDIR"]) | |||
4246 | 798 | installed_file.write(newline) | 776 | installed_file.write(newline) |
4247 | 799 | lineno = lineno + 1 | 777 | lineno = lineno + 1 |
4248 | 800 | installed_file.close() | 778 | installed_file.close() |
4250 | 801 | os.chmod(installed_location, 0755) | 779 | os.chmod(installed_location, 0o755) |
4251 | 802 | return 0 | 780 | return 0 |
4252 | 803 | 781 | ||
4253 | 804 | def copy_and_fix_perl_path(task): | 782 | def copy_and_fix_perl_path(task): |
4254 | @@ -826,7 +804,7 @@ def copy_and_fix_perl_path(task): | |||
4255 | 826 | installed_file.write(newline) | 804 | installed_file.write(newline) |
4256 | 827 | lineno = lineno + 1 | 805 | lineno = lineno + 1 |
4257 | 828 | installed_file.close() | 806 | installed_file.close() |
4259 | 829 | os.chmod(installed_location, 0755) | 807 | os.chmod(installed_location, 0o755) |
4260 | 830 | return 0 | 808 | return 0 |
4261 | 831 | 809 | ||
4262 | 832 | 810 | ||
4263 | @@ -834,6 +812,8 @@ def install_file(bld, destdir, file, chmod=MODE_644, flat=False, | |||
4264 | 834 | python_fixup=False, perl_fixup=False, | 812 | python_fixup=False, perl_fixup=False, |
4265 | 835 | destname=None, base_name=None): | 813 | destname=None, base_name=None): |
4266 | 836 | '''install a file''' | 814 | '''install a file''' |
4267 | 815 | if not isinstance(file, str): | ||
4268 | 816 | file = file.abspath() | ||
4269 | 837 | destdir = bld.EXPAND_VARIABLES(destdir) | 817 | destdir = bld.EXPAND_VARIABLES(destdir) |
4270 | 838 | if not destname: | 818 | if not destname: |
4271 | 839 | destname = file | 819 | destname = file |
4272 | @@ -898,16 +878,19 @@ def INSTALL_DIR(bld, path, chmod=0o755, env=None): | |||
4273 | 898 | if not path: | 878 | if not path: |
4274 | 899 | return [] | 879 | return [] |
4275 | 900 | 880 | ||
4277 | 901 | destpath = bld.get_install_path(path, env) | 881 | destpath = bld.EXPAND_VARIABLES(path) |
4278 | 882 | if Options.options.destdir: | ||
4279 | 883 | destpath = os.path.join(Options.options.destdir, destpath.lstrip(os.sep)) | ||
4280 | 902 | 884 | ||
4281 | 903 | if bld.is_install > 0: | 885 | if bld.is_install > 0: |
4282 | 904 | if not os.path.isdir(destpath): | 886 | if not os.path.isdir(destpath): |
4283 | 905 | try: | 887 | try: |
4284 | 888 | Logs.info('* create %s', destpath) | ||
4285 | 906 | os.makedirs(destpath) | 889 | os.makedirs(destpath) |
4286 | 907 | os.chmod(destpath, chmod) | 890 | os.chmod(destpath, chmod) |
4287 | 908 | except OSError as e: | 891 | except OSError as e: |
4288 | 909 | if not os.path.isdir(destpath): | 892 | if not os.path.isdir(destpath): |
4290 | 910 | raise Utils.WafError("Cannot create the folder '%s' (error: %s)" % (path, e)) | 893 | raise Errors.WafError("Cannot create the folder '%s' (error: %s)" % (path, e)) |
4291 | 911 | Build.BuildContext.INSTALL_DIR = INSTALL_DIR | 894 | Build.BuildContext.INSTALL_DIR = INSTALL_DIR |
4292 | 912 | 895 | ||
4293 | 913 | def INSTALL_DIRS(bld, destdir, dirs, chmod=0o755, env=None): | 896 | def INSTALL_DIRS(bld, destdir, dirs, chmod=0o755, env=None): |
4294 | @@ -938,7 +921,7 @@ def SAMBAMANPAGES(bld, manpages, extra_source=None): | |||
4295 | 938 | '''build and install manual pages''' | 921 | '''build and install manual pages''' |
4296 | 939 | bld.env.SAMBA_EXPAND_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/expand-sambadoc.xsl' | 922 | bld.env.SAMBA_EXPAND_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/expand-sambadoc.xsl' |
4297 | 940 | bld.env.SAMBA_MAN_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/man.xsl' | 923 | bld.env.SAMBA_MAN_XSL = bld.srcnode.abspath() + '/docs-xml/xslt/man.xsl' |
4299 | 941 | bld.env.SAMBA_CATALOG = bld.srcnode.abspath() + '/bin/default/docs-xml/build/catalog.xml' | 924 | bld.env.SAMBA_CATALOG = bld.bldnode.abspath() + '/docs-xml/build/catalog.xml' |
4300 | 942 | bld.env.SAMBA_CATALOGS = 'file:///etc/xml/catalog file:///usr/local/share/xml/catalog file://' + bld.env.SAMBA_CATALOG | 925 | bld.env.SAMBA_CATALOGS = 'file:///etc/xml/catalog file:///usr/local/share/xml/catalog file://' + bld.env.SAMBA_CATALOG |
4301 | 943 | 926 | ||
4302 | 944 | for m in manpages.split(): | 927 | for m in manpages.split(): |
4303 | @@ -958,59 +941,6 @@ def SAMBAMANPAGES(bld, manpages, extra_source=None): | |||
4304 | 958 | bld.INSTALL_FILES('${MANDIR}/man%s' % m[-1], m, flat=True) | 941 | bld.INSTALL_FILES('${MANDIR}/man%s' % m[-1], m, flat=True) |
4305 | 959 | Build.BuildContext.SAMBAMANPAGES = SAMBAMANPAGES | 942 | Build.BuildContext.SAMBAMANPAGES = SAMBAMANPAGES |
4306 | 960 | 943 | ||
4307 | 961 | ############################################################# | ||
4308 | 962 | # give a nicer display when building different types of files | ||
4309 | 963 | def progress_display(self, msg, fname): | ||
4310 | 964 | col1 = Logs.colors(self.color) | ||
4311 | 965 | col2 = Logs.colors.NORMAL | ||
4312 | 966 | total = self.position[1] | ||
4313 | 967 | n = len(str(total)) | ||
4314 | 968 | fs = '[%%%dd/%%%dd] %s %%s%%s%%s\n' % (n, n, msg) | ||
4315 | 969 | return fs % (self.position[0], self.position[1], col1, fname, col2) | ||
4316 | 970 | |||
4317 | 971 | def link_display(self): | ||
4318 | 972 | if Options.options.progress_bar != 0: | ||
4319 | 973 | return Task.Task.old_display(self) | ||
4320 | 974 | fname = self.outputs[0].bldpath(self.env) | ||
4321 | 975 | return progress_display(self, 'Linking', fname) | ||
4322 | 976 | Task.TaskBase.classes['cc_link'].display = link_display | ||
4323 | 977 | |||
4324 | 978 | def samba_display(self): | ||
4325 | 979 | if Options.options.progress_bar != 0: | ||
4326 | 980 | return Task.Task.old_display(self) | ||
4327 | 981 | |||
4328 | 982 | targets = LOCAL_CACHE(self, 'TARGET_TYPE') | ||
4329 | 983 | if self.name in targets: | ||
4330 | 984 | target_type = targets[self.name] | ||
4331 | 985 | type_map = { 'GENERATOR' : 'Generating', | ||
4332 | 986 | 'PROTOTYPE' : 'Generating' | ||
4333 | 987 | } | ||
4334 | 988 | if target_type in type_map: | ||
4335 | 989 | return progress_display(self, type_map[target_type], self.name) | ||
4336 | 990 | |||
4337 | 991 | if len(self.inputs) == 0: | ||
4338 | 992 | return Task.Task.old_display(self) | ||
4339 | 993 | |||
4340 | 994 | fname = self.inputs[0].bldpath(self.env) | ||
4341 | 995 | if fname[0:3] == '../': | ||
4342 | 996 | fname = fname[3:] | ||
4343 | 997 | ext_loc = fname.rfind('.') | ||
4344 | 998 | if ext_loc == -1: | ||
4345 | 999 | return Task.Task.old_display(self) | ||
4346 | 1000 | ext = fname[ext_loc:] | ||
4347 | 1001 | |||
4348 | 1002 | ext_map = { '.idl' : 'Compiling IDL', | ||
4349 | 1003 | '.et' : 'Compiling ERRTABLE', | ||
4350 | 1004 | '.asn1': 'Compiling ASN1', | ||
4351 | 1005 | '.c' : 'Compiling' } | ||
4352 | 1006 | if ext in ext_map: | ||
4353 | 1007 | return progress_display(self, ext_map[ext], fname) | ||
4354 | 1008 | return Task.Task.old_display(self) | ||
4355 | 1009 | |||
4356 | 1010 | Task.TaskBase.classes['Task'].old_display = Task.TaskBase.classes['Task'].display | ||
4357 | 1011 | Task.TaskBase.classes['Task'].display = samba_display | ||
4358 | 1012 | |||
4359 | 1013 | |||
4360 | 1014 | @after('apply_link') | 944 | @after('apply_link') |
4361 | 1015 | @feature('cshlib') | 945 | @feature('cshlib') |
4362 | 1016 | def apply_bundle_remove_dynamiclib_patch(self): | 946 | def apply_bundle_remove_dynamiclib_patch(self): |
4363 | diff --git a/buildtools/wafsamba/wscript b/buildtools/wafsamba/wscript | |||
4364 | index 0eef330..ab19859 100644 | |||
4365 | --- a/buildtools/wafsamba/wscript | |||
4366 | +++ b/buildtools/wafsamba/wscript | |||
4367 | @@ -3,7 +3,8 @@ | |||
4368 | 3 | # this is a base set of waf rules that everything else pulls in first | 3 | # this is a base set of waf rules that everything else pulls in first |
4369 | 4 | 4 | ||
4370 | 5 | import os, sys | 5 | import os, sys |
4372 | 6 | import wafsamba, Configure, Logs, Options, Utils | 6 | from waflib import Configure, Logs, Options, Utils, Context, Errors |
4373 | 7 | import wafsamba | ||
4374 | 7 | from samba_utils import os_path_relpath | 8 | from samba_utils import os_path_relpath |
4375 | 8 | from optparse import SUPPRESS_HELP | 9 | from optparse import SUPPRESS_HELP |
4376 | 9 | 10 | ||
4377 | @@ -14,12 +15,17 @@ from optparse import SUPPRESS_HELP | |||
4378 | 14 | # are resolved related to WAFCACHE. It will need a lot of testing | 15 | # are resolved related to WAFCACHE. It will need a lot of testing |
4379 | 15 | # before it is enabled by default. | 16 | # before it is enabled by default. |
4380 | 16 | if '--enable-auto-reconfigure' in sys.argv: | 17 | if '--enable-auto-reconfigure' in sys.argv: |
4382 | 17 | Configure.autoconfig = True | 18 | Configure.autoconfig = 'clobber' |
4383 | 18 | 19 | ||
4386 | 19 | def set_options(opt): | 20 | def default_value(option, default=''): |
4387 | 20 | opt.tool_options('compiler_cc') | 21 | if option in Options.options.__dict__: |
4388 | 22 | return Options.options.__dict__[option] | ||
4389 | 23 | return default | ||
4390 | 21 | 24 | ||
4392 | 22 | opt.tool_options('gnu_dirs') | 25 | def options(opt): |
4393 | 26 | opt.load('compiler_cc') | ||
4394 | 27 | |||
4395 | 28 | opt.load('gnu_dirs') | ||
4396 | 23 | 29 | ||
4397 | 24 | gr = opt.option_group('library handling options') | 30 | gr = opt.option_group('library handling options') |
4398 | 25 | 31 | ||
4399 | @@ -31,17 +37,17 @@ def set_options(opt): | |||
4400 | 31 | help=("comma separated list of normally public libraries to build instead as private libraries. May include !LIBNAME to disable making a library private. Can be 'NONE' or 'ALL' [auto]"), | 37 | help=("comma separated list of normally public libraries to build instead as private libraries. May include !LIBNAME to disable making a library private. Can be 'NONE' or 'ALL' [auto]"), |
4401 | 32 | action="store", dest='PRIVATE_LIBS', default='') | 38 | action="store", dest='PRIVATE_LIBS', default='') |
4402 | 33 | 39 | ||
4404 | 34 | extension_default = Options.options['PRIVATE_EXTENSION_DEFAULT'] | 40 | extension_default = default_value('PRIVATE_EXTENSION_DEFAULT') |
4405 | 35 | gr.add_option('--private-library-extension', | 41 | gr.add_option('--private-library-extension', |
4406 | 36 | help=("name extension for private libraries [%s]" % extension_default), | 42 | help=("name extension for private libraries [%s]" % extension_default), |
4407 | 37 | action="store", dest='PRIVATE_EXTENSION', default=extension_default) | 43 | action="store", dest='PRIVATE_EXTENSION', default=extension_default) |
4408 | 38 | 44 | ||
4410 | 39 | extension_exception = Options.options['PRIVATE_EXTENSION_EXCEPTION'] | 45 | extension_exception = default_value('PRIVATE_EXTENSION_EXCEPTION') |
4411 | 40 | gr.add_option('--private-extension-exception', | 46 | gr.add_option('--private-extension-exception', |
4412 | 41 | help=("comma separated list of libraries to not apply extension to [%s]" % extension_exception), | 47 | help=("comma separated list of libraries to not apply extension to [%s]" % extension_exception), |
4413 | 42 | action="store", dest='PRIVATE_EXTENSION_EXCEPTION', default=extension_exception) | 48 | action="store", dest='PRIVATE_EXTENSION_EXCEPTION', default=extension_exception) |
4414 | 43 | 49 | ||
4416 | 44 | builtin_default = Options.options['BUILTIN_LIBRARIES_DEFAULT'] | 50 | builtin_default = default_value('BUILTIN_LIBRARIES_DEFAULT') |
4417 | 45 | gr.add_option('--builtin-libraries', | 51 | gr.add_option('--builtin-libraries', |
4418 | 46 | help=("command separated list of libraries to build directly into binaries [%s]" % builtin_default), | 52 | help=("command separated list of libraries to build directly into binaries [%s]" % builtin_default), |
4419 | 47 | action="store", dest='BUILTIN_LIBRARIES', default=builtin_default) | 53 | action="store", dest='BUILTIN_LIBRARIES', default=builtin_default) |
4420 | @@ -71,7 +77,7 @@ def set_options(opt): | |||
4421 | 71 | action="store", dest='MODULESDIR', default='${PREFIX}/modules') | 77 | action="store", dest='MODULESDIR', default='${PREFIX}/modules') |
4422 | 72 | 78 | ||
4423 | 73 | opt.add_option('--with-privatelibdir', | 79 | opt.add_option('--with-privatelibdir', |
4425 | 74 | help=("private library directory [PREFIX/lib/%s]" % Utils.g_module.APPNAME), | 80 | help=("private library directory [PREFIX/lib/%s]" % Context.g_module.APPNAME), |
4426 | 75 | action="store", dest='PRIVATELIBDIR', default=None) | 81 | action="store", dest='PRIVATELIBDIR', default=None) |
4427 | 76 | 82 | ||
4428 | 77 | opt.add_option('--with-libiconv', | 83 | opt.add_option('--with-libiconv', |
4429 | @@ -109,9 +115,6 @@ def set_options(opt): | |||
4430 | 109 | gr.add_option('--enable-gccdeps', | 115 | gr.add_option('--enable-gccdeps', |
4431 | 110 | help=("Enable use of gcc -MD dependency module"), | 116 | help=("Enable use of gcc -MD dependency module"), |
4432 | 111 | action="store_true", dest='enable_gccdeps', default=True) | 117 | action="store_true", dest='enable_gccdeps', default=True) |
4433 | 112 | gr.add_option('--timestamp-dependencies', | ||
4434 | 113 | help=("use file timestamps instead of content for build dependencies (BROKEN)"), | ||
4435 | 114 | action="store_true", dest='timestamp_dependencies', default=False) | ||
4436 | 115 | gr.add_option('--pedantic', | 118 | gr.add_option('--pedantic', |
4437 | 116 | help=("Enable even more compiler warnings"), | 119 | help=("Enable even more compiler warnings"), |
4438 | 117 | action='store_true', dest='pedantic', default=False) | 120 | action='store_true', dest='pedantic', default=False) |
4439 | @@ -210,23 +213,19 @@ def set_options(opt): | |||
4440 | 210 | @Utils.run_once | 213 | @Utils.run_once |
4441 | 211 | def configure(conf): | 214 | def configure(conf): |
4442 | 212 | conf.env.hlist = [] | 215 | conf.env.hlist = [] |
4444 | 213 | conf.env.srcdir = conf.srcdir | 216 | conf.env.srcdir = conf.srcnode.abspath() |
4445 | 214 | 217 | ||
4446 | 215 | conf.define('SRCDIR', conf.env['srcdir']) | 218 | conf.define('SRCDIR', conf.env['srcdir']) |
4447 | 216 | 219 | ||
4448 | 217 | if Options.options.timestamp_dependencies: | ||
4449 | 218 | conf.ENABLE_TIMESTAMP_DEPENDENCIES() | ||
4450 | 219 | |||
4451 | 220 | conf.SETUP_CONFIGURE_CACHE(Options.options.enable_configure_cache) | 220 | conf.SETUP_CONFIGURE_CACHE(Options.options.enable_configure_cache) |
4452 | 221 | 221 | ||
4453 | 222 | # load our local waf extensions | 222 | # load our local waf extensions |
4457 | 223 | conf.check_tool('gnu_dirs') | 223 | conf.load('gnu_dirs') |
4458 | 224 | conf.check_tool('wafsamba') | 224 | conf.load('wafsamba') |
4456 | 225 | conf.check_tool('print_commands') | ||
4459 | 226 | 225 | ||
4460 | 227 | conf.CHECK_CC_ENV() | 226 | conf.CHECK_CC_ENV() |
4461 | 228 | 227 | ||
4463 | 229 | conf.check_tool('compiler_cc') | 228 | conf.load('compiler_c') |
4464 | 230 | 229 | ||
4465 | 231 | conf.CHECK_STANDARD_LIBPATH() | 230 | conf.CHECK_STANDARD_LIBPATH() |
4466 | 232 | 231 | ||
4467 | @@ -236,31 +235,10 @@ def configure(conf): | |||
4468 | 236 | # older gcc versions (< 4.4) does not work with gccdeps, so we have to see if the .d file is generated | 235 | # older gcc versions (< 4.4) does not work with gccdeps, so we have to see if the .d file is generated |
4469 | 237 | if Options.options.enable_gccdeps: | 236 | if Options.options.enable_gccdeps: |
4470 | 238 | # stale file removal - the configuration may pick up the old .pyc file | 237 | # stale file removal - the configuration may pick up the old .pyc file |
4472 | 239 | p = os.path.join(conf.srcdir, 'buildtools/wafsamba/gccdeps.pyc') | 238 | p = os.path.join(conf.env.srcdir, 'buildtools/wafsamba/gccdeps.pyc') |
4473 | 240 | if os.path.exists(p): | 239 | if os.path.exists(p): |
4474 | 241 | os.remove(p) | 240 | os.remove(p) |
4497 | 242 | 241 | conf.load('gccdeps') | |
4476 | 243 | from TaskGen import feature, after | ||
4477 | 244 | @feature('testd') | ||
4478 | 245 | @after('apply_core') | ||
4479 | 246 | def check_d(self): | ||
4480 | 247 | tsk = self.compiled_tasks[0] | ||
4481 | 248 | tsk.outputs.append(tsk.outputs[0].change_ext('.d')) | ||
4482 | 249 | |||
4483 | 250 | import Task | ||
4484 | 251 | cc = Task.TaskBase.classes['cc'] | ||
4485 | 252 | oldmeth = cc.run | ||
4486 | 253 | |||
4487 | 254 | cc.run = Task.compile_fun_noshell('cc', '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath(env)}')[0] | ||
4488 | 255 | try: | ||
4489 | 256 | try: | ||
4490 | 257 | conf.check(features='c testd', fragment='int main() {return 0;}\n', ccflags=['-MD'], mandatory=True, msg='Check for -MD') | ||
4491 | 258 | except: | ||
4492 | 259 | pass | ||
4493 | 260 | else: | ||
4494 | 261 | conf.check_tool('gccdeps', tooldir=conf.srcdir + "/buildtools/wafsamba") | ||
4495 | 262 | finally: | ||
4496 | 263 | cc.run = oldmeth | ||
4498 | 264 | 242 | ||
4499 | 265 | # make the install paths available in environment | 243 | # make the install paths available in environment |
4500 | 266 | conf.env.LIBDIR = Options.options.LIBDIR or '${PREFIX}/lib' | 244 | conf.env.LIBDIR = Options.options.LIBDIR or '${PREFIX}/lib' |
4501 | @@ -330,15 +308,16 @@ def configure(conf): | |||
4502 | 330 | "-qhalt=w", # IBM xlc | 308 | "-qhalt=w", # IBM xlc |
4503 | 331 | "-w2", # Tru64 | 309 | "-w2", # Tru64 |
4504 | 332 | ]: | 310 | ]: |
4507 | 333 | if conf.CHECK_CFLAGS([f], ''' | 311 | if conf.CHECK_CFLAGS([f]): |
4506 | 334 | '''): | ||
4508 | 335 | if not 'WERROR_CFLAGS' in conf.env: | 312 | if not 'WERROR_CFLAGS' in conf.env: |
4509 | 336 | conf.env['WERROR_CFLAGS'] = [] | 313 | conf.env['WERROR_CFLAGS'] = [] |
4510 | 337 | conf.env['WERROR_CFLAGS'].extend([f]) | 314 | conf.env['WERROR_CFLAGS'].extend([f]) |
4511 | 338 | break | 315 | break |
4512 | 339 | 316 | ||
4513 | 340 | # check which compiler/linker flags are needed for rpath support | 317 | # check which compiler/linker flags are needed for rpath support |
4515 | 341 | if not conf.CHECK_LDFLAGS(['-Wl,-rpath,.']) and conf.CHECK_LDFLAGS(['-Wl,-R,.']): | 318 | if conf.CHECK_LDFLAGS(['-Wl,-rpath,.']): |
4516 | 319 | conf.env['RPATH_ST'] = '-Wl,-rpath,%s' | ||
4517 | 320 | elif conf.CHECK_LDFLAGS(['-Wl,-R,.']): | ||
4518 | 342 | conf.env['RPATH_ST'] = '-Wl,-R,%s' | 321 | conf.env['RPATH_ST'] = '-Wl,-R,%s' |
4519 | 343 | 322 | ||
4520 | 344 | # check for rpath | 323 | # check for rpath |
4521 | @@ -348,7 +327,7 @@ def configure(conf): | |||
4522 | 348 | conf.env.RPATH_ON_INSTALL = (conf.env.RPATH_ON_BUILD and | 327 | conf.env.RPATH_ON_INSTALL = (conf.env.RPATH_ON_BUILD and |
4523 | 349 | not Options.options.disable_rpath_install) | 328 | not Options.options.disable_rpath_install) |
4524 | 350 | if not conf.env.PRIVATELIBDIR: | 329 | if not conf.env.PRIVATELIBDIR: |
4526 | 351 | conf.env.PRIVATELIBDIR = '%s/%s' % (conf.env.LIBDIR, Utils.g_module.APPNAME) | 330 | conf.env.PRIVATELIBDIR = '%s/%s' % (conf.env.LIBDIR, Context.g_module.APPNAME) |
4527 | 352 | conf.env.RPATH_ON_INSTALL_PRIVATE = ( | 331 | conf.env.RPATH_ON_INSTALL_PRIVATE = ( |
4528 | 353 | not Options.options.disable_rpath_private_install) | 332 | not Options.options.disable_rpath_private_install) |
4529 | 354 | else: | 333 | else: |
4530 | @@ -370,10 +349,10 @@ def configure(conf): | |||
4531 | 370 | else: | 349 | else: |
4532 | 371 | conf.env.HAVE_LD_VERSION_SCRIPT = False | 350 | conf.env.HAVE_LD_VERSION_SCRIPT = False |
4533 | 372 | 351 | ||
4535 | 373 | if conf.CHECK_CFLAGS(['-fvisibility=hidden'] + conf.env.WERROR_CFLAGS): | 352 | if conf.CHECK_CFLAGS(['-fvisibility=hidden']): |
4536 | 374 | conf.env.VISIBILITY_CFLAGS = '-fvisibility=hidden' | 353 | conf.env.VISIBILITY_CFLAGS = '-fvisibility=hidden' |
4537 | 375 | conf.CHECK_CODE('''int main(void) { return 0; } | 354 | conf.CHECK_CODE('''int main(void) { return 0; } |
4539 | 376 | __attribute__((visibility("default"))) void vis_foo2(void) {}''', | 355 | __attribute__((visibility("default"))) void vis_foo2(void) {}\n''', |
4540 | 377 | cflags=conf.env.VISIBILITY_CFLAGS, | 356 | cflags=conf.env.VISIBILITY_CFLAGS, |
4541 | 378 | strict=True, | 357 | strict=True, |
4542 | 379 | define='HAVE_VISIBILITY_ATTR', addmain=False) | 358 | define='HAVE_VISIBILITY_ATTR', addmain=False) |
4543 | @@ -466,6 +445,15 @@ def configure(conf): | |||
4544 | 466 | conf.DEFINE('_GNU_SOURCE', 1, add_to_cflags=True) | 445 | conf.DEFINE('_GNU_SOURCE', 1, add_to_cflags=True) |
4545 | 467 | conf.DEFINE('_XOPEN_SOURCE_EXTENDED', 1, add_to_cflags=True) | 446 | conf.DEFINE('_XOPEN_SOURCE_EXTENDED', 1, add_to_cflags=True) |
4546 | 468 | 447 | ||
4547 | 448 | # | ||
4548 | 449 | # Needs to be defined before std*.h and string*.h are included | ||
4549 | 450 | # As Python.h already brings string.h we need it in CFLAGS. | ||
4550 | 451 | # See memset_s() details here: | ||
4551 | 452 | # https://en.cppreference.com/w/c/string/byte/memset | ||
4552 | 453 | # | ||
4553 | 454 | if conf.CHECK_CFLAGS(['-D__STDC_WANT_LIB_EXT1__=1']): | ||
4554 | 455 | conf.ADD_CFLAGS('-D__STDC_WANT_LIB_EXT1__=1') | ||
4555 | 456 | |||
4556 | 469 | # on Tru64 certain features are only available with _OSF_SOURCE set to 1 | 457 | # on Tru64 certain features are only available with _OSF_SOURCE set to 1 |
4557 | 470 | # and _XOPEN_SOURCE set to 600 | 458 | # and _XOPEN_SOURCE set to 600 |
4558 | 471 | if conf.env['SYSTEM_UNAME_SYSNAME'] == 'OSF1': | 459 | if conf.env['SYSTEM_UNAME_SYSNAME'] == 'OSF1': |
4559 | @@ -501,7 +489,7 @@ struct foo bar = { .y = 'X', .x = 1 }; | |||
4560 | 501 | 489 | ||
4561 | 502 | # see if we need special largefile flags | 490 | # see if we need special largefile flags |
4562 | 503 | if not conf.CHECK_LARGEFILE(): | 491 | if not conf.CHECK_LARGEFILE(): |
4564 | 504 | raise Utils.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8') | 492 | raise Errors.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8') |
4565 | 505 | 493 | ||
4566 | 506 | if conf.env.HAVE_STDDEF_H and conf.env.HAVE_STDLIB_H: | 494 | if conf.env.HAVE_STDDEF_H and conf.env.HAVE_STDLIB_H: |
4567 | 507 | conf.DEFINE('STDC_HEADERS', 1) | 495 | conf.DEFINE('STDC_HEADERS', 1) |
4568 | @@ -512,7 +500,7 @@ struct foo bar = { .y = 'X', .x = 1 }; | |||
4569 | 512 | conf.DEFINE('TIME_WITH_SYS_TIME', 1) | 500 | conf.DEFINE('TIME_WITH_SYS_TIME', 1) |
4570 | 513 | 501 | ||
4571 | 514 | # cope with different extensions for libraries | 502 | # cope with different extensions for libraries |
4573 | 515 | (root, ext) = os.path.splitext(conf.env.shlib_PATTERN) | 503 | (root, ext) = os.path.splitext(conf.env.cshlib_PATTERN) |
4574 | 516 | if ext[0] == '.': | 504 | if ext[0] == '.': |
4575 | 517 | conf.define('SHLIBEXT', ext[1:], quote=True) | 505 | conf.define('SHLIBEXT', ext[1:], quote=True) |
4576 | 518 | else: | 506 | else: |
4577 | @@ -534,7 +522,7 @@ struct foo bar = { .y = 'X', .x = 1 }; | |||
4578 | 534 | #if !defined(LITTLE) || !defined(B) || LITTLE != B | 522 | #if !defined(LITTLE) || !defined(B) || LITTLE != B |
4579 | 535 | #error Not little endian. | 523 | #error Not little endian. |
4580 | 536 | #endif | 524 | #endif |
4582 | 537 | int main(void) { return 0; }""", | 525 | int main(void) { return 0; }\n""", |
4583 | 538 | addmain=False, | 526 | addmain=False, |
4584 | 539 | headers="endian.h sys/endian.h", | 527 | headers="endian.h sys/endian.h", |
4585 | 540 | define="HAVE_LITTLE_ENDIAN") | 528 | define="HAVE_LITTLE_ENDIAN") |
4586 | @@ -553,7 +541,7 @@ struct foo bar = { .y = 'X', .x = 1 }; | |||
4587 | 553 | #if !defined(BIG) || !defined(B) || BIG != B | 541 | #if !defined(BIG) || !defined(B) || BIG != B |
4588 | 554 | #error Not big endian. | 542 | #error Not big endian. |
4589 | 555 | #endif | 543 | #endif |
4591 | 556 | int main(void) { return 0; }""", | 544 | int main(void) { return 0; }\n""", |
4592 | 557 | addmain=False, | 545 | addmain=False, |
4593 | 558 | headers="endian.h sys/endian.h", | 546 | headers="endian.h sys/endian.h", |
4594 | 559 | define="HAVE_BIG_ENDIAN") | 547 | define="HAVE_BIG_ENDIAN") |
4595 | @@ -576,7 +564,7 @@ struct foo bar = { .y = 'X', .x = 1 }; | |||
4596 | 576 | # Extra sanity check. | 564 | # Extra sanity check. |
4597 | 577 | if conf.CONFIG_SET("HAVE_BIG_ENDIAN") == conf.CONFIG_SET("HAVE_LITTLE_ENDIAN"): | 565 | if conf.CONFIG_SET("HAVE_BIG_ENDIAN") == conf.CONFIG_SET("HAVE_LITTLE_ENDIAN"): |
4598 | 578 | Logs.error("Failed endian determination. The PDP-11 is back?") | 566 | Logs.error("Failed endian determination. The PDP-11 is back?") |
4600 | 579 | sys.exit(1) | 567 | sys.exit(1) |
4601 | 580 | else: | 568 | else: |
4602 | 581 | if conf.CONFIG_SET("HAVE_BIG_ENDIAN"): | 569 | if conf.CONFIG_SET("HAVE_BIG_ENDIAN"): |
4603 | 582 | conf.DEFINE('WORDS_BIGENDIAN', 1) | 570 | conf.DEFINE('WORDS_BIGENDIAN', 1) |
4604 | @@ -607,12 +595,13 @@ struct foo bar = { .y = 'X', .x = 1 }; | |||
4605 | 607 | 595 | ||
4606 | 608 | def build(bld): | 596 | def build(bld): |
4607 | 609 | # give a more useful message if the source directory has moved | 597 | # give a more useful message if the source directory has moved |
4609 | 610 | relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath()) | 598 | curdir = bld.path.abspath() |
4610 | 599 | srcdir = bld.srcnode.abspath() | ||
4611 | 600 | relpath = os_path_relpath(curdir, srcdir) | ||
4612 | 611 | if relpath.find('../') != -1: | 601 | if relpath.find('../') != -1: |
4615 | 612 | Logs.error('bld.curdir %s is not a child of %s' % (bld.curdir, bld.srcnode.abspath())) | 602 | Logs.error('bld.path %s is not a child of %s' % (curdir, srcdir)) |
4616 | 613 | raise Utils.WafError('''The top source directory has moved. Please run distclean and reconfigure''') | 603 | raise Errors.WafError('''The top source directory has moved. Please run distclean and reconfigure''') |
4617 | 614 | 604 | ||
4618 | 615 | bld.CHECK_MAKEFLAGS() | ||
4619 | 616 | bld.SETUP_BUILD_GROUPS() | 605 | bld.SETUP_BUILD_GROUPS() |
4620 | 617 | bld.ENFORCE_GROUP_ORDERING() | 606 | bld.ENFORCE_GROUP_ORDERING() |
4621 | 618 | bld.CHECK_PROJECT_RULES() | 607 | bld.CHECK_PROJECT_RULES() |
4622 | diff --git a/common/dump.c b/common/dump.c | |||
4623 | index 73286b8..adcf591 100644 | |||
4624 | --- a/common/dump.c | |||
4625 | +++ b/common/dump.c | |||
4626 | @@ -60,6 +60,7 @@ static tdb_off_t tdb_dump_record(struct tdb_context *tdb, int hash, | |||
4627 | 60 | 60 | ||
4628 | 61 | static int tdb_dump_chain(struct tdb_context *tdb, int i) | 61 | static int tdb_dump_chain(struct tdb_context *tdb, int i) |
4629 | 62 | { | 62 | { |
4630 | 63 | struct tdb_chainwalk_ctx chainwalk; | ||
4631 | 63 | tdb_off_t rec_ptr, top; | 64 | tdb_off_t rec_ptr, top; |
4632 | 64 | 65 | ||
4633 | 65 | if (i == -1) { | 66 | if (i == -1) { |
4634 | @@ -74,11 +75,19 @@ static int tdb_dump_chain(struct tdb_context *tdb, int i) | |||
4635 | 74 | if (tdb_ofs_read(tdb, top, &rec_ptr) == -1) | 75 | if (tdb_ofs_read(tdb, top, &rec_ptr) == -1) |
4636 | 75 | return tdb_unlock(tdb, i, F_WRLCK); | 76 | return tdb_unlock(tdb, i, F_WRLCK); |
4637 | 76 | 77 | ||
4638 | 78 | tdb_chainwalk_init(&chainwalk, rec_ptr); | ||
4639 | 79 | |||
4640 | 77 | if (rec_ptr) | 80 | if (rec_ptr) |
4641 | 78 | printf("hash=%d\n", i); | 81 | printf("hash=%d\n", i); |
4642 | 79 | 82 | ||
4643 | 80 | while (rec_ptr) { | 83 | while (rec_ptr) { |
4644 | 84 | bool ok; | ||
4645 | 81 | rec_ptr = tdb_dump_record(tdb, i, rec_ptr); | 85 | rec_ptr = tdb_dump_record(tdb, i, rec_ptr); |
4646 | 86 | ok = tdb_chainwalk_check(tdb, &chainwalk, rec_ptr); | ||
4647 | 87 | if (!ok) { | ||
4648 | 88 | printf("circular hash chain %d\n", i); | ||
4649 | 89 | break; | ||
4650 | 90 | } | ||
4651 | 82 | } | 91 | } |
4652 | 83 | 92 | ||
4653 | 84 | return tdb_unlock(tdb, i, F_WRLCK); | 93 | return tdb_unlock(tdb, i, F_WRLCK); |
4654 | @@ -86,7 +95,7 @@ static int tdb_dump_chain(struct tdb_context *tdb, int i) | |||
4655 | 86 | 95 | ||
4656 | 87 | _PUBLIC_ void tdb_dump_all(struct tdb_context *tdb) | 96 | _PUBLIC_ void tdb_dump_all(struct tdb_context *tdb) |
4657 | 88 | { | 97 | { |
4659 | 89 | int i; | 98 | uint32_t i; |
4660 | 90 | for (i=0;i<tdb->hash_size;i++) { | 99 | for (i=0;i<tdb->hash_size;i++) { |
4661 | 91 | tdb_dump_chain(tdb, i); | 100 | tdb_dump_chain(tdb, i); |
4662 | 92 | } | 101 | } |
4663 | diff --git a/common/freelist.c b/common/freelist.c | |||
4664 | index 86fac2f..37a4c16 100644 | |||
4665 | --- a/common/freelist.c | |||
4666 | +++ b/common/freelist.c | |||
4667 | @@ -27,12 +27,6 @@ | |||
4668 | 27 | 27 | ||
4669 | 28 | #include "tdb_private.h" | 28 | #include "tdb_private.h" |
4670 | 29 | 29 | ||
4671 | 30 | /* 'right' merges can involve O(n^2) cost when combined with a | ||
4672 | 31 | traverse, so they are disabled until we find a way to do them in | ||
4673 | 32 | O(1) time | ||
4674 | 33 | */ | ||
4675 | 34 | #define USE_RIGHT_MERGES 0 | ||
4676 | 35 | |||
4677 | 36 | /* read a freelist record and check for simple errors */ | 30 | /* read a freelist record and check for simple errors */ |
4678 | 37 | int tdb_rec_free_read(struct tdb_context *tdb, tdb_off_t off, struct tdb_record *rec) | 31 | int tdb_rec_free_read(struct tdb_context *tdb, tdb_off_t off, struct tdb_record *rec) |
4679 | 38 | { | 32 | { |
4680 | @@ -61,30 +55,6 @@ int tdb_rec_free_read(struct tdb_context *tdb, tdb_off_t off, struct tdb_record | |||
4681 | 61 | return 0; | 55 | return 0; |
4682 | 62 | } | 56 | } |
4683 | 63 | 57 | ||
4684 | 64 | |||
4685 | 65 | #if USE_RIGHT_MERGES | ||
4686 | 66 | /* Remove an element from the freelist. Must have alloc lock. */ | ||
4687 | 67 | static int remove_from_freelist(struct tdb_context *tdb, tdb_off_t off, tdb_off_t next) | ||
4688 | 68 | { | ||
4689 | 69 | tdb_off_t last_ptr, i; | ||
4690 | 70 | |||
4691 | 71 | /* read in the freelist top */ | ||
4692 | 72 | last_ptr = FREELIST_TOP; | ||
4693 | 73 | while (tdb_ofs_read(tdb, last_ptr, &i) != -1 && i != 0) { | ||
4694 | 74 | if (i == off) { | ||
4695 | 75 | /* We've found it! */ | ||
4696 | 76 | return tdb_ofs_write(tdb, last_ptr, &next); | ||
4697 | 77 | } | ||
4698 | 78 | /* Follow chain (next offset is at start of record) */ | ||
4699 | 79 | last_ptr = i; | ||
4700 | 80 | } | ||
4701 | 81 | tdb->ecode = TDB_ERR_CORRUPT; | ||
4702 | 82 | TDB_LOG((tdb, TDB_DEBUG_FATAL,"remove_from_freelist: not on list at off=%u\n", off)); | ||
4703 | 83 | return -1; | ||
4704 | 84 | } | ||
4705 | 85 | #endif | ||
4706 | 86 | |||
4707 | 87 | |||
4708 | 88 | /* update a record tailer (must hold allocation lock) */ | 58 | /* update a record tailer (must hold allocation lock) */ |
4709 | 89 | static int update_tailer(struct tdb_context *tdb, tdb_off_t offset, | 59 | static int update_tailer(struct tdb_context *tdb, tdb_off_t offset, |
4710 | 90 | const struct tdb_record *rec) | 60 | const struct tdb_record *rec) |
4711 | @@ -199,7 +169,7 @@ static int merge_with_left_record(struct tdb_context *tdb, | |||
4712 | 199 | * 0 if left was not a free record | 169 | * 0 if left was not a free record |
4713 | 200 | * 1 if left was free and successfully merged. | 170 | * 1 if left was free and successfully merged. |
4714 | 201 | * | 171 | * |
4716 | 202 | * The currend record is handed in with pointer and fully read record. | 172 | * The current record is handed in with pointer and fully read record. |
4717 | 203 | * | 173 | * |
4718 | 204 | * The left record pointer and struct can be retrieved as result | 174 | * The left record pointer and struct can be retrieved as result |
4719 | 205 | * in lp and lr; | 175 | * in lp and lr; |
4720 | @@ -318,33 +288,6 @@ int tdb_free(struct tdb_context *tdb, tdb_off_t offset, struct tdb_record *rec) | |||
4721 | 318 | goto fail; | 288 | goto fail; |
4722 | 319 | } | 289 | } |
4723 | 320 | 290 | ||
4724 | 321 | #if USE_RIGHT_MERGES | ||
4725 | 322 | /* Look right first (I'm an Australian, dammit) */ | ||
4726 | 323 | if (offset + sizeof(*rec) + rec->rec_len + sizeof(*rec) <= tdb->map_size) { | ||
4727 | 324 | tdb_off_t right = offset + sizeof(*rec) + rec->rec_len; | ||
4728 | 325 | struct tdb_record r; | ||
4729 | 326 | |||
4730 | 327 | if (tdb->methods->tdb_read(tdb, right, &r, sizeof(r), DOCONV()) == -1) { | ||
4731 | 328 | TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: right read failed at %u\n", right)); | ||
4732 | 329 | goto left; | ||
4733 | 330 | } | ||
4734 | 331 | |||
4735 | 332 | /* If it's free, expand to include it. */ | ||
4736 | 333 | if (r.magic == TDB_FREE_MAGIC) { | ||
4737 | 334 | if (remove_from_freelist(tdb, right, r.next) == -1) { | ||
4738 | 335 | TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: right free failed at %u\n", right)); | ||
4739 | 336 | goto left; | ||
4740 | 337 | } | ||
4741 | 338 | rec->rec_len += sizeof(r) + r.rec_len; | ||
4742 | 339 | if (update_tailer(tdb, offset, rec) == -1) { | ||
4743 | 340 | TDB_LOG((tdb, TDB_DEBUG_FATAL, "tdb_free: update_tailer failed at %u\n", offset)); | ||
4744 | 341 | goto fail; | ||
4745 | 342 | } | ||
4746 | 343 | } | ||
4747 | 344 | } | ||
4748 | 345 | left: | ||
4749 | 346 | #endif | ||
4750 | 347 | |||
4751 | 348 | ret = check_merge_with_left_record(tdb, offset, rec, NULL, NULL); | 291 | ret = check_merge_with_left_record(tdb, offset, rec, NULL, NULL); |
4752 | 349 | if (ret == -1) { | 292 | if (ret == -1) { |
4753 | 350 | goto fail; | 293 | goto fail; |
4754 | @@ -444,6 +387,8 @@ static tdb_off_t tdb_allocate_from_freelist( | |||
4755 | 444 | struct tdb_context *tdb, tdb_len_t length, struct tdb_record *rec) | 387 | struct tdb_context *tdb, tdb_len_t length, struct tdb_record *rec) |
4756 | 445 | { | 388 | { |
4757 | 446 | tdb_off_t rec_ptr, last_ptr, newrec_ptr; | 389 | tdb_off_t rec_ptr, last_ptr, newrec_ptr; |
4758 | 390 | struct tdb_chainwalk_ctx chainwalk; | ||
4759 | 391 | bool modified; | ||
4760 | 447 | struct { | 392 | struct { |
4761 | 448 | tdb_off_t rec_ptr, last_ptr; | 393 | tdb_off_t rec_ptr, last_ptr; |
4762 | 449 | tdb_len_t rec_len; | 394 | tdb_len_t rec_len; |
4763 | @@ -466,6 +411,9 @@ static tdb_off_t tdb_allocate_from_freelist( | |||
4764 | 466 | if (tdb_ofs_read(tdb, FREELIST_TOP, &rec_ptr) == -1) | 411 | if (tdb_ofs_read(tdb, FREELIST_TOP, &rec_ptr) == -1) |
4765 | 467 | return 0; | 412 | return 0; |
4766 | 468 | 413 | ||
4767 | 414 | modified = false; | ||
4768 | 415 | tdb_chainwalk_init(&chainwalk, rec_ptr); | ||
4769 | 416 | |||
4770 | 469 | bestfit.rec_ptr = 0; | 417 | bestfit.rec_ptr = 0; |
4771 | 470 | bestfit.last_ptr = 0; | 418 | bestfit.last_ptr = 0; |
4772 | 471 | bestfit.rec_len = 0; | 419 | bestfit.rec_len = 0; |
4773 | @@ -526,6 +474,8 @@ static tdb_off_t tdb_allocate_from_freelist( | |||
4774 | 526 | merge_created_candidate = true; | 474 | merge_created_candidate = true; |
4775 | 527 | } | 475 | } |
4776 | 528 | 476 | ||
4777 | 477 | modified = true; | ||
4778 | 478 | |||
4779 | 529 | continue; | 479 | continue; |
4780 | 530 | } | 480 | } |
4781 | 531 | 481 | ||
4782 | @@ -542,6 +492,14 @@ static tdb_off_t tdb_allocate_from_freelist( | |||
4783 | 542 | last_ptr = rec_ptr; | 492 | last_ptr = rec_ptr; |
4784 | 543 | rec_ptr = rec->next; | 493 | rec_ptr = rec->next; |
4785 | 544 | 494 | ||
4786 | 495 | if (!modified) { | ||
4787 | 496 | bool ok; | ||
4788 | 497 | ok = tdb_chainwalk_check(tdb, &chainwalk, rec_ptr); | ||
4789 | 498 | if (!ok) { | ||
4790 | 499 | return 0; | ||
4791 | 500 | } | ||
4792 | 501 | } | ||
4793 | 502 | |||
4794 | 545 | /* if we've found a record that is big enough, then | 503 | /* if we've found a record that is big enough, then |
4795 | 546 | stop searching if its also not too big. The | 504 | stop searching if its also not too big. The |
4796 | 547 | definition of 'too big' changes as we scan | 505 | definition of 'too big' changes as we scan |
4797 | @@ -597,6 +555,17 @@ static bool tdb_alloc_dead( | |||
4798 | 597 | return (tdb_ofs_write(tdb, last_ptr, &rec->next) == 0); | 555 | return (tdb_ofs_write(tdb, last_ptr, &rec->next) == 0); |
4799 | 598 | } | 556 | } |
4800 | 599 | 557 | ||
4801 | 558 | static void tdb_purge_dead(struct tdb_context *tdb, uint32_t hash) | ||
4802 | 559 | { | ||
4803 | 560 | int max_dead_records = tdb->max_dead_records; | ||
4804 | 561 | |||
4805 | 562 | tdb->max_dead_records = 0; | ||
4806 | 563 | |||
4807 | 564 | tdb_trim_dead(tdb, hash); | ||
4808 | 565 | |||
4809 | 566 | tdb->max_dead_records = max_dead_records; | ||
4810 | 567 | } | ||
4811 | 568 | |||
4812 | 600 | /* | 569 | /* |
4813 | 601 | * Chain "hash" is assumed to be locked | 570 | * Chain "hash" is assumed to be locked |
4814 | 602 | */ | 571 | */ |
4815 | @@ -605,7 +574,7 @@ tdb_off_t tdb_allocate(struct tdb_context *tdb, int hash, tdb_len_t length, | |||
4816 | 605 | struct tdb_record *rec) | 574 | struct tdb_record *rec) |
4817 | 606 | { | 575 | { |
4818 | 607 | tdb_off_t ret; | 576 | tdb_off_t ret; |
4820 | 608 | int i; | 577 | uint32_t i; |
4821 | 609 | 578 | ||
4822 | 610 | if (tdb->max_dead_records == 0) { | 579 | if (tdb->max_dead_records == 0) { |
4823 | 611 | /* | 580 | /* |
4824 | @@ -661,6 +630,12 @@ blocking_freelist_allocate: | |||
4825 | 661 | if (tdb_lock(tdb, -1, F_WRLCK) == -1) { | 630 | if (tdb_lock(tdb, -1, F_WRLCK) == -1) { |
4826 | 662 | return 0; | 631 | return 0; |
4827 | 663 | } | 632 | } |
4828 | 633 | /* | ||
4829 | 634 | * Dead records can happen even if max_dead_records==0, they | ||
4830 | 635 | * are older than the max_dead_records concept: They happen if | ||
4831 | 636 | * tdb_delete happens concurrently with a traverse. | ||
4832 | 637 | */ | ||
4833 | 638 | tdb_purge_dead(tdb, hash); | ||
4834 | 664 | ret = tdb_allocate_from_freelist(tdb, length, rec); | 639 | ret = tdb_allocate_from_freelist(tdb, length, rec); |
4835 | 665 | tdb_unlock(tdb, -1, F_WRLCK); | 640 | tdb_unlock(tdb, -1, F_WRLCK); |
4836 | 666 | return ret; | 641 | return ret; |
4837 | diff --git a/common/io.c b/common/io.c | |||
4838 | index 94b3163..df46017 100644 | |||
4839 | --- a/common/io.c | |||
4840 | +++ b/common/io.c | |||
4841 | @@ -96,7 +96,7 @@ static int tdb_ftruncate(struct tdb_context *tdb, off_t length) | |||
4842 | 96 | return ret; | 96 | return ret; |
4843 | 97 | } | 97 | } |
4844 | 98 | 98 | ||
4846 | 99 | #if HAVE_POSIX_FALLOCATE | 99 | #ifdef HAVE_POSIX_FALLOCATE |
4847 | 100 | static int tdb_posix_fallocate(struct tdb_context *tdb, off_t offset, | 100 | static int tdb_posix_fallocate(struct tdb_context *tdb, off_t offset, |
4848 | 101 | off_t len) | 101 | off_t len) |
4849 | 102 | { | 102 | { |
4850 | @@ -413,7 +413,7 @@ static int tdb_expand_file(struct tdb_context *tdb, tdb_off_t size, tdb_off_t ad | |||
4851 | 413 | return -1; | 413 | return -1; |
4852 | 414 | } | 414 | } |
4853 | 415 | 415 | ||
4855 | 416 | #if HAVE_POSIX_FALLOCATE | 416 | #ifdef HAVE_POSIX_FALLOCATE |
4856 | 417 | ret = tdb_posix_fallocate(tdb, size, addition); | 417 | ret = tdb_posix_fallocate(tdb, size, addition); |
4857 | 418 | if (ret == 0) { | 418 | if (ret == 0) { |
4858 | 419 | return 0; | 419 | return 0; |
4859 | diff --git a/common/lock.c b/common/lock.c | |||
4860 | index 9f30c7a..f55184d 100644 | |||
4861 | --- a/common/lock.c | |||
4862 | +++ b/common/lock.c | |||
4863 | @@ -149,8 +149,8 @@ static int fcntl_unlock(struct tdb_context *tdb, int rw, off_t off, off_t len) | |||
4864 | 149 | * This is the memory layout of the hashchain array: | 149 | * This is the memory layout of the hashchain array: |
4865 | 150 | * | 150 | * |
4866 | 151 | * FREELIST_TOP + 0 = freelist | 151 | * FREELIST_TOP + 0 = freelist |
4869 | 152 | * FREELIST_TOP + 4 = hashtbale list 0 | 152 | * FREELIST_TOP + 4 = hashtable list 0 |
4870 | 153 | * FREELIST_TOP + 8 = hashtbale list 1 | 153 | * FREELIST_TOP + 8 = hashtable list 1 |
4871 | 154 | * ... | 154 | * ... |
4872 | 155 | * | 155 | * |
4873 | 156 | * Otoh lock_offset computes: | 156 | * Otoh lock_offset computes: |
4874 | diff --git a/common/open.c b/common/open.c | |||
4875 | index 8baa7e4..dd5783e 100644 | |||
4876 | --- a/common/open.c | |||
4877 | +++ b/common/open.c | |||
4878 | @@ -230,8 +230,6 @@ static bool check_header_hash(struct tdb_context *tdb, | |||
4879 | 230 | static bool tdb_mutex_open_ok(struct tdb_context *tdb, | 230 | static bool tdb_mutex_open_ok(struct tdb_context *tdb, |
4880 | 231 | const struct tdb_header *header) | 231 | const struct tdb_header *header) |
4881 | 232 | { | 232 | { |
4882 | 233 | int locked; | ||
4883 | 234 | |||
4884 | 235 | if (tdb->flags & TDB_NOLOCK) { | 233 | if (tdb->flags & TDB_NOLOCK) { |
4885 | 236 | /* | 234 | /* |
4886 | 237 | * We don't look at locks, so it does not matter to have a | 235 | * We don't look at locks, so it does not matter to have a |
4887 | @@ -240,37 +238,6 @@ static bool tdb_mutex_open_ok(struct tdb_context *tdb, | |||
4888 | 240 | return true; | 238 | return true; |
4889 | 241 | } | 239 | } |
4890 | 242 | 240 | ||
4891 | 243 | locked = tdb_nest_lock(tdb, ACTIVE_LOCK, F_WRLCK, | ||
4892 | 244 | TDB_LOCK_NOWAIT|TDB_LOCK_PROBE); | ||
4893 | 245 | |||
4894 | 246 | if ((locked == -1) && (tdb->ecode == TDB_ERR_LOCK)) { | ||
4895 | 247 | /* | ||
4896 | 248 | * CLEAR_IF_FIRST still active. The tdb was created on this | ||
4897 | 249 | * host, so we can assume the mutex implementation is | ||
4898 | 250 | * compatible. Important for tools like tdbdump on a still | ||
4899 | 251 | * open locking.tdb. | ||
4900 | 252 | */ | ||
4901 | 253 | goto check_local_settings; | ||
4902 | 254 | } | ||
4903 | 255 | |||
4904 | 256 | /* | ||
4905 | 257 | * We got the CLEAR_IF_FIRST lock. That means the database was | ||
4906 | 258 | * potentially copied from somewhere else. The mutex implementation | ||
4907 | 259 | * might be incompatible. | ||
4908 | 260 | */ | ||
4909 | 261 | |||
4910 | 262 | if (tdb_nest_unlock(tdb, ACTIVE_LOCK, F_WRLCK, false) == -1) { | ||
4911 | 263 | /* | ||
4912 | 264 | * Should not happen | ||
4913 | 265 | */ | ||
4914 | 266 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok: " | ||
4915 | 267 | "failed to release ACTIVE_LOCK on %s: %s\n", | ||
4916 | 268 | tdb->name, strerror(errno))); | ||
4917 | 269 | return false; | ||
4918 | 270 | } | ||
4919 | 271 | |||
4920 | 272 | check_local_settings: | ||
4921 | 273 | |||
4922 | 274 | if (!(tdb->flags & TDB_MUTEX_LOCKING)) { | 241 | if (!(tdb->flags & TDB_MUTEX_LOCKING)) { |
4923 | 275 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok[%s]: " | 242 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok[%s]: " |
4924 | 276 | "Can use mutexes only with " | 243 | "Can use mutexes only with " |
4925 | @@ -281,10 +248,10 @@ check_local_settings: | |||
4926 | 281 | 248 | ||
4927 | 282 | if (tdb_mutex_size(tdb) != header->mutex_size) { | 249 | if (tdb_mutex_size(tdb) != header->mutex_size) { |
4928 | 283 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok[%s]: " | 250 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_mutex_open_ok[%s]: " |
4930 | 284 | "Mutex size changed from %u to %u\n.", | 251 | "Mutex size changed from %"PRIu32" to %zu\n.", |
4931 | 285 | tdb->name, | 252 | tdb->name, |
4934 | 286 | (unsigned int)header->mutex_size, | 253 | header->mutex_size, |
4935 | 287 | (unsigned int)tdb_mutex_size(tdb))); | 254 | tdb_mutex_size(tdb))); |
4936 | 288 | return false; | 255 | return false; |
4937 | 289 | } | 256 | } |
4938 | 290 | 257 | ||
4939 | @@ -297,7 +264,9 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td | |||
4940 | 297 | tdb_hash_func hash_fn) | 264 | tdb_hash_func hash_fn) |
4941 | 298 | { | 265 | { |
4942 | 299 | int orig_errno = errno; | 266 | int orig_errno = errno; |
4944 | 300 | struct tdb_header header; | 267 | struct tdb_header header = { |
4945 | 268 | .version = 0, | ||
4946 | 269 | }; | ||
4947 | 301 | struct tdb_context *tdb; | 270 | struct tdb_context *tdb; |
4948 | 302 | struct stat st; | 271 | struct stat st; |
4949 | 303 | int rev = 0; | 272 | int rev = 0; |
4950 | @@ -309,8 +278,6 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td | |||
4951 | 309 | uint32_t magic1, magic2; | 278 | uint32_t magic1, magic2; |
4952 | 310 | int ret; | 279 | int ret; |
4953 | 311 | 280 | ||
4954 | 312 | ZERO_STRUCT(header); | ||
4955 | 313 | |||
4956 | 314 | if (!(tdb = (struct tdb_context *)calloc(1, sizeof *tdb))) { | 281 | if (!(tdb = (struct tdb_context *)calloc(1, sizeof *tdb))) { |
4957 | 315 | /* Can't log this */ | 282 | /* Can't log this */ |
4958 | 316 | errno = ENOMEM; | 283 | errno = ENOMEM; |
4959 | @@ -418,14 +385,6 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td | |||
4960 | 418 | * the runtime check for existing tdb's comes later. | 385 | * the runtime check for existing tdb's comes later. |
4961 | 419 | */ | 386 | */ |
4962 | 420 | 387 | ||
4963 | 421 | if (!(tdb->flags & TDB_CLEAR_IF_FIRST)) { | ||
4964 | 422 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_open_ex: " | ||
4965 | 423 | "invalid flags for %s - TDB_MUTEX_LOCKING " | ||
4966 | 424 | "requires TDB_CLEAR_IF_FIRST\n", name)); | ||
4967 | 425 | errno = EINVAL; | ||
4968 | 426 | goto fail; | ||
4969 | 427 | } | ||
4970 | 428 | |||
4971 | 429 | if (tdb->flags & TDB_INTERNAL) { | 388 | if (tdb->flags & TDB_INTERNAL) { |
4972 | 430 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_open_ex: " | 389 | TDB_LOG((tdb, TDB_DEBUG_ERROR, "tdb_open_ex: " |
4973 | 431 | "invalid flags for %s - TDB_MUTEX_LOCKING and " | 390 | "invalid flags for %s - TDB_MUTEX_LOCKING and " |
4974 | @@ -634,6 +593,30 @@ _PUBLIC_ struct tdb_context *tdb_open_ex(const char *name, int hash_size, int td | |||
4975 | 634 | * mutex locking. | 593 | * mutex locking. |
4976 | 635 | */ | 594 | */ |
4977 | 636 | tdb->hdr_ofs = header.mutex_size; | 595 | tdb->hdr_ofs = header.mutex_size; |
4978 | 596 | |||
4979 | 597 | if ((!(tdb_flags & TDB_CLEAR_IF_FIRST)) && (!tdb->read_only)) { | ||
4980 | 598 | /* | ||
4981 | 599 | * Open an existing mutexed tdb, but without | ||
4982 | 600 | * CLEAR_IF_FIRST. We need to initialize the | ||
4983 | 601 | * mutex array and keep the CLEAR_IF_FIRST | ||
4984 | 602 | * lock locked. | ||
4985 | 603 | */ | ||
4986 | 604 | ret = tdb_nest_lock(tdb, ACTIVE_LOCK, F_WRLCK, | ||
4987 | 605 | TDB_LOCK_NOWAIT|TDB_LOCK_PROBE); | ||
4988 | 606 | locked = (ret == 0); | ||
4989 | 607 | |||
4990 | 608 | if (locked) { | ||
4991 | 609 | ret = tdb_mutex_init(tdb); | ||
4992 | 610 | if (ret == -1) { | ||
4993 | 611 | TDB_LOG((tdb, | ||
4994 | 612 | TDB_DEBUG_FATAL, | ||
4995 | 613 | "tdb_open_ex: tdb_mutex_init " | ||
4996 | 614 | "failed for ""%s: %s\n", | ||
4997 | 615 | name, strerror(errno))); | ||
4998 | 616 | goto fail; | ||
4999 | 617 | } | ||
5000 | 618 | } |
From IRC:
<cjwatson> bzr-git's tdb thing is used by LP
<cjwatson> for git-to-bzr code imports
<cjwatson> this doesn't mean you should block on it, since we maintain our own copies of the modules we need, just FYI on what sorts of things it's used for