diff -Nru a2jmidid-8~dfsg0/a2j_control a2jmidid-9/a2j_control --- a2jmidid-8~dfsg0/a2j_control 2012-07-03 00:13:26.000000000 +0000 +++ a2jmidid-9/a2j_control 2019-09-23 22:34:12.000000000 +0000 @@ -1,106 +1,203 @@ -#!/usr/bin/env python - -name_base = 'org.gna.home.a2jmidid' -control_interface_name = name_base + '.control' -service_name = name_base - -import sys -import os -from traceback import print_exc +#!/usr/bin/env python3 +from argparse import ArgumentParser import dbus -def main(): - if len(sys.argv) == 1: - print "Usage: %s [command] [command] ..." % os.path.basename(sys.argv[0]) - print "Commands:" - print " exit - exit a2j bridge dbus service" - print " start - start bridging" - print " stop - stop brdiging" - print " status - get bridging status" - print " gjcn - get JACK client name" - print " ma2jp - map ALSA to JACK playback port" - print " ma2jc - map ALSA to JACK capture port" - print " mj2a - map JACK port to ALSA port" - print " ehw - enable export of hardware ports" - print " dhw - disable export of hardware ports" - sys.exit(0) - - bus = dbus.SessionBus() - - controller = bus.get_object(service_name, "/") - control_iface = dbus.Interface(controller, control_interface_name) - - # check arguments - index = 1 - while index < len(sys.argv): - arg = sys.argv[index] - index += 1 - try: - if arg == "exit": - print "--- exit" - control_iface.exit() - elif arg == "start": - print "--- start" - control_iface.start() - elif arg == "stop": - print "--- stop" - control_iface.stop() - elif arg == "status": - print "--- status" - if control_iface.is_started(): - print "Bridging enabled" - else: - print "Bridging disabled" - if control_iface.get_hw_export(): - print "Hardware exported" - else: - print "Hardware not exported" - elif arg == "gjcn": - print "--- get jack client name" - print control_iface.get_jack_client_name() - elif arg == 'ma2jp': - print "--- map ALSA to JACK playback port" - if index + 1 >= len(sys.argv): - print "map ALSA to JACK playback port command requires ALSA client ID and ALSA port ID arguments" - sys.exit() - client_id = sys.argv[index] - index += 1 - port_id = sys.argv[index] - index += 1 - - print "'%s'" % control_iface.map_alsa_to_jack_port(client_id, port_id, True) - elif arg == 'ma2jc': - print "--- map ALSA to JACK capture port" - if index + 1 >= len(sys.argv): - print "map ALSA to JACK capture port command requires ALSA client ID and ALSA port ID arguments" - sys.exit() - client_id = sys.argv[index] - index += 1 - port_id = sys.argv[index] - index += 1 - - print "'%s'" % control_iface.map_alsa_to_jack_port(client_id, port_id, False) - elif arg == 'mj2a': - print "--- map JACK to ALSA port" - if index >= len(sys.argv): - print "map JACK to ALSA port command requires JACK port name argument" - sys.exit() - jack_port = sys.argv[index] - index += 1 - - out = control_iface.map_jack_port_to_alsa(jack_port) - print "%u:%u ('%s':'%s')" % (int(out[0]), int(out[1]), str(out[2]), str(out[3])) - elif arg == 'ehw': - print "--- enable export of hardware ports" - control_iface.set_hw_export(True) - elif arg == 'dhw': - print "--- disable export of hardware ports" - control_iface.set_hw_export(False) - else: - print "Unknown command '%s'" % arg - except dbus.DBusException, e: - print "DBus exception: %s" % str(e) + +class A2JControl: + """ + Class holding all data and relevant functions to interact with the a2j dbus + controller interface. 
+ """ + + service_name = 'org.gna.home.a2jmidid' + control_interface_name = service_name + '.control' + controller_interface = None + parser = None + args = None + + def add_arg_parser(self): + self.parser = ArgumentParser( + description='Bridge ALSA MIDI to JACK MIDI') + self.parser.add_argument( + '--start', + action='store_true', + default=False, + help='Start bridging') + self.parser.add_argument( + '--stop', + action='store_true', + default=False, + help='Stop bridging') + self.parser.add_argument( + '--exit', + action='store_true', + default=False, + help='Exit a2j bridge dbus service') + self.parser.add_argument( + '--status', + action='store_true', + default=False, + help='Display bridging status') + self.parser.add_argument( + '--gjcn', '--jack-client-name', + action='store_true', + default=False, + help='Get JACK client name') + self.parser.add_argument( + '--ma2jp', + default=None, + nargs=2, + help='Map ALSA input port to JACK output port' + + ' (requires ALSA client ID and JACK port ID arguments)') + self.parser.add_argument( + '--ma2jc', + default=None, + nargs=2, + help='Map ALSA output port to JACK input port ' + + '(requires ALSA client ID and JACK port ID arguments)') + self.parser.add_argument( + '--mj2a', + default=None, + nargs=1, + help='Map JACK port to ALSA port (requires JACK port name)') + self.parser.add_argument( + '--ehw', + action='store_true', + default=False, + help='Enable export of ALSA hardware ports') + self.parser.add_argument( + '--dhw', + action='store_true', + default=False, + help='Disable export of ALSA hardware ports') + self.parser.add_argument( + '--aup', + action='store_true', + default=False, + help='Allow unique port names') + self.parser.add_argument( + '--dup', + action='store_true', + default=False, + help='Disallow unique port names') + self.args = self.parser.parse_args() + + def initialize_dbus_controller_interface(self): + controller = dbus.SessionBus().get_object(self.service_name, "/") + self.controller_interface = dbus.Interface( + controller, + self.control_interface_name) + + def controller_start(self): + print('--- start') + self.controller_interface.start() + + def controller_stop(self): + print('--- stop') + self.controller_interface.stop() + + def controller_exit(self): + print('--- exit') + self.controller_interface.exit() + + def controller_status(self): + print('--- status') + if self.controller_interface.is_started(): + print('Bridging enabled') + else: + print('Bridging disabled') + if self.controller_interface.get_hw_export(): + print('Hardware exported') + else: + print('Hardware not exported') + if self.controller_interface.get_disable_port_uniqueness(): + print('Avoiding unique port names') + else: + print('Allowing unique port names') + + def controller_map_alsa_in_to_jack_playback( + self, + alsa_client_id, + jack_port): + print('--- map ALSA to JACK playback port') + print('{}'.format(self.controller_interface.map_alsa_to_jack_port( + alsa_client_id, + jack_port, + True))) + + def controller_map_alsa_out_to_jack_capture( + self, + alsa_client_id, + jack_port): + print('--- map ALSA to JACK capture port') + print('{}'.format(self.controller_interface.map_alsa_to_jack_port( + alsa_client_id, + jack_port, + False))) + + def controller_map_jack_port_to_alsa(self, jack_port): + print('--- map JACK to ALSA port') + out = self.controller_interface.map_jack_port_to_alsa(jack_port) + print('{}:{} ({}:{})'.format(out[0], out[1], out[2], out[3])) + + def controller_get_jack_client_name(self): + print('--- get jack client 
name')
+        print(self.controller_interface.get_jack_client_name())
+
+    def controller_export_hardware_ports(self, state):
+        if state:
+            print('--- enable export of hardware ports')
+            self.controller_interface.set_hw_export(True)
+        else:
+            print('--- disable export of hardware ports')
+            self.controller_interface.set_hw_export(False)
+
+    def controller_set_port_name_uniqueness(self, state):
+        if state:
+            print('--- allow unique port names')
+            self.controller_interface.set_disable_port_uniqueness(False)
+        else:
+            print('--- disallow unique port names')
+            self.controller_interface.set_disable_port_uniqueness(True)
+
+    def call_controller_function(self):
+        if self.args.start:
+            self.controller_start()
+        elif self.args.stop:
+            self.controller_stop()
+        elif self.args.exit:
+            self.controller_exit()
+        elif self.args.status:
+            self.controller_status()
+        elif self.args.gjcn:
+            self.controller_get_jack_client_name()
+        elif self.args.ma2jp:
+            self.controller_map_alsa_in_to_jack_playback(
+                self.args.ma2jp[0],
+                self.args.ma2jp[1])
+        elif self.args.ma2jc:
+            self.controller_map_alsa_out_to_jack_capture(
+                self.args.ma2jc[0],
+                self.args.ma2jc[1])
+        elif self.args.mj2a:
+            self.controller_map_jack_port_to_alsa(self.args.mj2a[0])
+        elif self.args.ehw:
+            self.controller_export_hardware_ports(True)
+        elif self.args.dhw:
+            self.controller_export_hardware_ports(False)
+        elif self.args.dup:
+            self.controller_set_port_name_uniqueness(False)
+        elif self.args.aup:
+            self.controller_set_port_name_uniqueness(True)
+        else:
+            self.parser.print_help()
+
+    def __init__(self):
+        self.initialize_dbus_controller_interface()
+        self.add_arg_parser()
+        self.call_controller_function()
+
 if __name__ == '__main__':
-    main()
+    A2JControl()
diff -Nru a2jmidid-8~dfsg0/a2jmidid.c a2jmidid-9/a2jmidid.c
--- a2jmidid-8~dfsg0/a2jmidid.c	2012-07-05 01:13:48.000000000 +0000
+++ a2jmidid-9/a2jmidid.c	2019-09-23 22:34:12.000000000 +0000
@@ -52,7 +52,6 @@
 #include "conf.h"
 #include "jack.h"
 #include "sigsegv.h"
-#include "gitversion.h"
 #include "dbus_iface_control.h"
 
 #define MAIN_LOOP_SLEEP_INTERVAL 50	// in milliseconds
@@ -499,7 +498,7 @@
     a2j_info("----------------------------");
   }
 
-  a2j_info("JACK MIDI <-> ALSA sequencer MIDI bridge, version " A2J_VERSION " (" GIT_VERSION ") built on %s", timestamp_str);
+  a2j_info("JACK MIDI <-> ALSA sequencer MIDI bridge, version " A2J_VERSION " built on %s", timestamp_str);
   a2j_info("Copyright 2006,2007 Dmitry S. Baikov");
   a2j_info("Copyright 2007,2008,2009,2011,2012 Nedko Arnaudov");
 
diff -Nru a2jmidid-8~dfsg0/AUTHORS a2jmidid-9/AUTHORS
--- a2jmidid-8~dfsg0/AUTHORS	2009-12-07 00:46:57.000000000 +0000
+++ a2jmidid-9/AUTHORS	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-Nedko Arnaudov
-Dmitry S. Baikov
-Paul Davis
-Torben Hohn
-
-Juuso Alasuutari has copyright ownership on some D-Bus related code, taken from other projects (lash, jackdbus).
diff -Nru a2jmidid-8~dfsg0/AUTHORS.rst a2jmidid-9/AUTHORS.rst
--- a2jmidid-8~dfsg0/AUTHORS.rst	1970-01-01 00:00:00.000000000 +0000
+++ a2jmidid-9/AUTHORS.rst	2019-09-23 22:34:12.000000000 +0000
@@ -0,0 +1,10 @@
+=======
+Authors
+=======
+
+* David Runge
+* Dmitry S. Baikov
+* Juuso Alasuutari has copyright ownership on some D-Bus related code, taken from other projects (lash, jackdbus).
+* Nedko Arnaudov +* Paul Davis +* Torben Hohn diff -Nru a2jmidid-8~dfsg0/CHANGELOG.rst a2jmidid-9/CHANGELOG.rst --- a2jmidid-8~dfsg0/CHANGELOG.rst 1970-01-01 00:00:00.000000000 +0000 +++ a2jmidid-9/CHANGELOG.rst 2019-09-23 22:34:12.000000000 +0000 @@ -0,0 +1,93 @@ +========= +Changelog +========= + +Version 9 on 2019-09-24 +----------------------- + +* architecture fixes for aarch64 and powerpc64 +* porting a2j_control to python3 +* replace waf with meson as build system +* control unique port names over D-Bus +* man page spelling fixes +* fix linking against libpthread + +Version 8 "Sophronius of Vratsa" on 2012-07-05 +---------------------------------------------- + +* -u commandline option +* D-Bus method for checking whether hw export is enabled. Kudos to Danni Coy +* Fix for resource leak. Kudos to Dan A. Muresan +* Improved error message for snd_seq_connect_to() failures +* --mandir= option in waf. Kudos to Dan Church + +Version 7 "Paisius of Hilendar" on 2011-01-16 +--------------------------------------------- + +* MIDI processing improvements +* Use the JACK limit for max port name size (sr #2526) +* Adopt to shared library policy changes in some distros (sr #2547) +* dbus support can now be disabled at configure stage +* fix build on not so common platforms (LP: #556351) +* man pages (from Debian) +* reopen log file when it is deleted or replaced + +Version 6 "Indzhe Voyvoda" on 2009-12-29 +---------------------------------------- + +* MIDI processing improvements +* Handle large number of ports +* a2j script (non-dbus-like behaviour for dbus environment) +* Allow tweaking through dbus of the hardware port export option +* Fix a use of invalid memory +* Fix port miss that can occur if port appears during bridge startup + +Version 5 "Athos" on 2009-06-13 +------------------------------- + +* Fix thight loop on D-Bus disconnect +* D-Bus signals for bridge start and stop +* Fixed alsamidi "disappearing output" bug. (backport from jack1) +* MIDI note-off normalization fix from Dave Robillard (Backport from jack1) +* Removed wrong assert from alsa_seqmidi.c reported by Ken Ellinwood (Backport + from jack1) +* Mark anything that looks like a hardware port as physical&terminal (Backport + from jack1/jack2) +* Fix potential crash when D-Bus is not used +* Support for multiple ALSA clients with same name +* Merge midibridge changeset by Paul Davis that is expected to fix midi event + timing problems that some people have reported. + +Version 4 "Devsirme" on 2008-08-03 +---------------------------------- + +* Fix typos in docs +* Disable use of C++ demangling in sigsegv. Fix for sr #2074 +* Fix a2j_control help text (thanks kfoltman!) +* Request fixed JACK client name. Fix for bug #12139 +* Handle missing svnversion executable nicely. Fixes bug #12138 + +Version 3 "Bodrum" on 2008-08-03 +-------------------------------- + +* Improved port naming, support for bidirectional ports +* Allow exporting of hardware ports (disabled by default) +* Switch from autotools to waf +* Remove support for old JACK MIDI API variants +* Remove usage of posix semaphores that could cause sleep in realtime context, + in rare circumstances +* D-Bus background service mode. The old manual mode is still working. +* Log file when running in background service mode. +* Improved documentation +* Import, with slight modifications, static bridges created by Sean Bolton and + Lars Luthman. 
+ +Version 2 on 2007-10-27 +----------------------- + +* Improved build system (autotools) and support for older JACK variants + +Version 1 on 2007-08-26 +----------------------- + +* Initial release diff -Nru a2jmidid-8~dfsg0/conf.h a2jmidid-9/conf.h --- a2jmidid-8~dfsg0/conf.h 2009-05-07 18:39:13.000000000 +0000 +++ a2jmidid-9/conf.h 2019-09-23 22:34:12.000000000 +0000 @@ -22,6 +22,7 @@ #define CONF_H__AE361BE4_EE60_4F5C_B2D4_13D71A525018__INCLUDED extern bool g_a2j_export_hw_ports; +extern bool g_disable_port_uniqueness; extern char * g_a2j_jack_server_name; void diff -Nru a2jmidid-8~dfsg0/config.h.in a2jmidid-9/config.h.in --- a2jmidid-8~dfsg0/config.h.in 1970-01-01 00:00:00.000000000 +0000 +++ a2jmidid-9/config.h.in 2019-09-23 22:34:12.000000000 +0000 @@ -0,0 +1,10 @@ +#ifndef _CONFIG_H +#define _CONFIG_H + +#define HAVE_ALSA 1 +#define HAVE_JACK 1 +#define HAVE_DBUS_1 @dbus@ +#define HAVE_GETOPT_H 1 +#define A2J_VERSION "@version@" + +#endif diff -Nru a2jmidid-8~dfsg0/dbus_iface_control.c a2jmidid-9/dbus_iface_control.c --- a2jmidid-8~dfsg0/dbus_iface_control.c 2012-07-03 00:13:26.000000000 +0000 +++ a2jmidid-9/dbus_iface_control.c 2019-09-23 22:34:12.000000000 +0000 @@ -99,6 +99,49 @@ &hw_export); } +static void a2j_dbus_get_disable_port_uniqueness(struct a2j_dbus_method_call * call_ptr) +{ + dbus_bool_t disable_port_uniqueness; + + disable_port_uniqueness = g_disable_port_uniqueness; + + a2j_dbus_construct_method_return_single( + call_ptr, + DBUS_TYPE_BOOLEAN, + &disable_port_uniqueness); +} + +static void a2j_dbus_set_disable_port_uniqueness(struct a2j_dbus_method_call * call_ptr) +{ + DBusError error; + dbus_bool_t disable_port_uniqueness; + + if (a2j_is_started()) + { + a2j_dbus_error(call_ptr, A2J_DBUS_ERROR_BRIDGE_RUNNING, "Bridge is started"); + return; + } + + dbus_error_init(&error); + + if (!dbus_message_get_args( + call_ptr->message, + &error, + DBUS_TYPE_BOOLEAN, &disable_port_uniqueness, + DBUS_TYPE_INVALID)) + { + a2j_dbus_error(call_ptr, A2J_DBUS_ERROR_INVALID_ARGS, "Invalid arguments to method \"%s\"", call_ptr->method_name); + dbus_error_free(&error); + return; + } + + g_disable_port_uniqueness = disable_port_uniqueness; + + a2j_info("Unique port names %s.", g_disable_port_uniqueness ? 
"disabled": "enabled"); + + a2j_dbus_construct_method_return_void(call_ptr); +} + static void a2j_dbus_start( @@ -377,6 +420,14 @@ A2J_DBUS_METHOD_ARGUMENT("hw_export", DBUS_TYPE_BOOLEAN_AS_STRING, A2J_DBUS_DIRECTION_OUT) A2J_DBUS_METHOD_ARGUMENTS_END +A2J_DBUS_METHOD_ARGUMENTS_BEGIN(set_disable_port_uniqueness) + A2J_DBUS_METHOD_ARGUMENT("disable_port_uniqueness", DBUS_TYPE_BOOLEAN_AS_STRING, A2J_DBUS_DIRECTION_IN) +A2J_DBUS_METHOD_ARGUMENTS_END + +A2J_DBUS_METHOD_ARGUMENTS_BEGIN(get_disable_port_uniqueness) + A2J_DBUS_METHOD_ARGUMENT("disable_port_uniqueness", DBUS_TYPE_BOOLEAN_AS_STRING, A2J_DBUS_DIRECTION_OUT) +A2J_DBUS_METHOD_ARGUMENTS_END + A2J_DBUS_METHODS_BEGIN A2J_DBUS_METHOD_DESCRIBE(exit, a2j_dbus_exit) A2J_DBUS_METHOD_DESCRIBE(start, a2j_dbus_start) @@ -387,6 +438,8 @@ A2J_DBUS_METHOD_DESCRIBE(map_jack_port_to_alsa, a2j_dbus_map_jack_port_to_alsa) A2J_DBUS_METHOD_DESCRIBE(set_hw_export, a2j_dbus_set_hw_export) A2J_DBUS_METHOD_DESCRIBE(get_hw_export, a2j_dbus_get_hw_export) + A2J_DBUS_METHOD_DESCRIBE(set_disable_port_uniqueness, a2j_dbus_set_disable_port_uniqueness) + A2J_DBUS_METHOD_DESCRIBE(get_disable_port_uniqueness, a2j_dbus_get_disable_port_uniqueness) A2J_DBUS_METHODS_END A2J_DBUS_SIGNAL_ARGUMENTS_BEGIN(bridge_started) diff -Nru a2jmidid-8~dfsg0/debian/a2jmidid.install a2jmidid-9/debian/a2jmidid.install --- a2jmidid-8~dfsg0/debian/a2jmidid.install 2016-12-28 08:48:25.000000000 +0000 +++ a2jmidid-9/debian/a2jmidid.install 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -debian/tmp/usr/bin -debian/tmp/usr/share/dbus-1 diff -Nru a2jmidid-8~dfsg0/debian/a2jmidid.manpages a2jmidid-9/debian/a2jmidid.manpages --- a2jmidid-8~dfsg0/debian/a2jmidid.manpages 2016-12-28 08:48:25.000000000 +0000 +++ a2jmidid-9/debian/a2jmidid.manpages 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -man/*.1 diff -Nru a2jmidid-8~dfsg0/debian/changelog a2jmidid-9/debian/changelog --- a2jmidid-8~dfsg0/debian/changelog 2016-12-28 08:50:56.000000000 +0000 +++ a2jmidid-9/debian/changelog 2019-10-13 16:36:50.000000000 +0000 @@ -1,3 +1,36 @@ +a2jmidid (9-2) unstable; urgency=medium + + * Team upload + * Brown paper bag release + - Include proper changelog + + -- Sebastian Ramacher Sun, 13 Oct 2019 18:36:50 +0200 + +a2jmidid (9-1) unstable; urgency=medium + + * Team upload + + [ Ondřej Nový ] + * d/control: Set Vcs-* to salsa.debian.org + + [ Felipe Sateler ] + * Change maintainer address to debian-multimedia@lists.debian.org + + [ Ondřej Nový ] + * Use debhelper-compat instead of debian/compat + + [ Sebastian Ramacher ] + * New upstream release (Closes: #941167) + - Port to Python 3 (Closes: #936104) + * debian/control: + - Bump debhelper compat to 12 + - Bump Standards-Version + * debian/: + - Point to new upstream + - Use meson build system and Python 3 + + -- Sebastian Ramacher Sun, 13 Oct 2019 18:07:19 +0200 + a2jmidid (8~dfsg0-3) unstable; urgency=medium * Team upload. 
diff -Nru a2jmidid-8~dfsg0/debian/compat a2jmidid-9/debian/compat --- a2jmidid-8~dfsg0/debian/compat 2016-12-28 08:30:58.000000000 +0000 +++ a2jmidid-9/debian/compat 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -10 diff -Nru a2jmidid-8~dfsg0/debian/control a2jmidid-9/debian/control --- a2jmidid-8~dfsg0/debian/control 2016-12-28 08:49:44.000000000 +0000 +++ a2jmidid-9/debian/control 2019-10-13 16:09:59.000000000 +0000 @@ -1,27 +1,28 @@ Source: a2jmidid Section: sound Priority: optional -Maintainer: Debian Multimedia Maintainers +Maintainer: Debian Multimedia Maintainers Uploaders: Adrian Knoth , Build-Depends: - debhelper (>= 10), + debhelper-compat (= 12), dh-python, libasound2-dev, libdbus-1-dev, libjack-dev (>= 0.107.0) | libjack-jackd2-dev, - python-all (>= 2.6.6-3), -Standards-Version: 3.9.8 -Vcs-Git: https://anonscm.debian.org/git/pkg-multimedia/a2jmidid.git -Vcs-Browser: https://anonscm.debian.org/git/pkg-multimedia/a2jmidid.git -Homepage: http://home.gna.org/a2jmidid/ + python3, + meson +Standards-Version: 4.4.1 +Vcs-Git: https://salsa.debian.org/multimedia-team/a2jmidid.git +Vcs-Browser: https://salsa.debian.org/multimedia-team/a2jmidid +Homepage: https://github.com/linuxaudio/a2jmidid/ Package: a2jmidid Architecture: any Depends: - python-dbus, + python3-dbus, ${misc:Depends}, - ${python:Depends}, + ${python3:Depends}, ${shlibs:Depends}, Description: Daemon for exposing legacy ALSA MIDI in JACK MIDI systems Main goal of this project is to ease usage of legacy, not JACK-ified diff -Nru a2jmidid-8~dfsg0/debian/patches/arm64-sigsegv.patch a2jmidid-9/debian/patches/arm64-sigsegv.patch --- a2jmidid-8~dfsg0/debian/patches/arm64-sigsegv.patch 2016-12-28 08:42:25.000000000 +0000 +++ a2jmidid-9/debian/patches/arm64-sigsegv.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -Description: disable NGREG check on arm64 -Author: Logan Rosen -Last-Update: 2016-12-28 -Bug-Debian: https://bugs.debian.org/773088 -Forwarded: no - ---- a/sigsegv.c -+++ b/sigsegv.c -@@ -91,7 +91,7 @@ - a2j_error("info.si_errno = %d", info->si_errno); - a2j_error("info.si_code = %d (%s)", info->si_code, si_codes[info->si_code]); - a2j_error("info.si_addr = %p", info->si_addr); --#if !defined(__alpha__) && !defined(__ia64__) && !defined(__FreeBSD_kernel__) && !defined(__arm__) && !defined(__hppa__) && !defined(__sh__) -+#if !defined(__alpha__) && !defined(__ia64__) && !defined(__FreeBSD_kernel__) && !defined(__arm__) && !defined(__hppa__) && !defined(__sh__) && !defined(__aarch64__) - for(i = 0; i < NGREG; i++) - a2j_error("reg[%02d] = 0x" REGFORMAT, i, - #if defined(__powerpc__) && !defined(__powerpc64__) diff -Nru a2jmidid-8~dfsg0/debian/patches/ppc64-sigsegv.patch a2jmidid-9/debian/patches/ppc64-sigsegv.patch --- a2jmidid-8~dfsg0/debian/patches/ppc64-sigsegv.patch 2016-12-28 08:43:41.000000000 +0000 +++ a2jmidid-9/debian/patches/ppc64-sigsegv.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -Description: Following the same variable and struct usage within sys header - file ucontext.h to cover powerpc 64 bit archs. 
-Author: Fernando Seiti Furusato -Last-Update: 2016-12-27 -Bug-Debian: https://bugs.debian.org/769141 - ---- a2jmidid-8~dfsg0.orig/sigsegv.c -+++ a2jmidid-8~dfsg0/sigsegv.c -@@ -94,8 +94,11 @@ static void signal_segv(int signum, sigi - #if !defined(__alpha__) && !defined(__ia64__) && !defined(__FreeBSD_kernel__) && !defined(__arm__) && !defined(__hppa__) && !defined(__sh__) - for(i = 0; i < NGREG; i++) - a2j_error("reg[%02d] = 0x" REGFORMAT, i, --#if defined(__powerpc__) -+#if defined(__powerpc__) && !defined(__powerpc64__) - ucontext->uc_mcontext.uc_regs[i] -+/* just following the variable type and member logic as the ones used here to cover ppc64*/ -+#elif defined(__powerpc64__) -+ ucontext->uc_mcontext.gp_regs[i] - #elif defined(__sparc__) && defined(__arch64__) - ucontext->uc_mcontext.mc_gregs[i] - #else diff -Nru a2jmidid-8~dfsg0/debian/patches/series a2jmidid-9/debian/patches/series --- a2jmidid-8~dfsg0/debian/patches/series 2016-12-28 08:43:26.000000000 +0000 +++ a2jmidid-9/debian/patches/series 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -shebangs.diff -ppc64-sigsegv.patch -arm64-sigsegv.patch diff -Nru a2jmidid-8~dfsg0/debian/patches/shebangs.diff a2jmidid-9/debian/patches/shebangs.diff --- a2jmidid-8~dfsg0/debian/patches/shebangs.diff 2016-12-28 08:03:29.000000000 +0000 +++ a2jmidid-9/debian/patches/shebangs.diff 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -Description: Fix shebangs of python scripts. -Author: Alessio Treglia -Origin: Debian -Forwarded: no ---- - a2j_control | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - ---- a2jmidid.orig/a2j_control -+++ a2jmidid/a2j_control -@@ -1,4 +1,4 @@ --#!/usr/bin/env python -+#!/usr/bin/python - - name_base = 'org.gna.home.a2jmidid' - control_interface_name = name_base + '.control' diff -Nru a2jmidid-8~dfsg0/debian/rules a2jmidid-9/debian/rules --- a2jmidid-8~dfsg0/debian/rules 2016-12-28 08:29:45.000000000 +0000 +++ a2jmidid-9/debian/rules 2019-10-13 16:06:04.000000000 +0000 @@ -1,38 +1,7 @@ #!/usr/bin/make -f -upstream_version ?= $(shell dpkg-parsechangelog -S Version | sed -rne 's/^([0-9.]+)(\+dfsg\d+)?.*$$/\1/p') -dfsg_version = $(upstream_version)~dfsg0 -pkg = $(shell dpkg-parsechangelog -S Source) - %: - dh $@ --with python2 - -override_dh_auto_configure: - ./waf configure --prefix=/usr - -override_dh_auto_build: - ./waf - -override_dh_auto_clean: - ./waf clean || true - find wafadmin -name "*.pyc" -delete - rm -rf build - dh_auto_clean - -override_dh_auto_install: - ./waf --destdir=$(CURDIR)/debian/tmp install + dh $@ --with python3 override_dh_installchangelogs: - dh_installchangelogs NEWS - -# get-orig-source to drop waf -get-orig-source: - uscan --noconf --force-download --rename --repack --download-current-version --destdir=. 
-	tar -xf $(pkg)_$(upstream_version).orig.tar.bz2
-	mv $(pkg)-$(upstream_version) $(pkg)-$(dfsg_version)
-	cd $(pkg)-$(dfsg_version) ; python waf --help > /dev/null
-	mv $(pkg)-$(dfsg_version)/.waf-*/* $(pkg)-$(dfsg_version)
-	sed -i '/^#==>$$/,$$d' $(pkg)-$(dfsg_version)/waf
-	rmdir $(pkg)-$(dfsg_version)/.waf-*
-	BZIP="-9f" tar -czf $(pkg)_$(dfsg_version).orig.tar.bz2 $(pkg)-$(dfsg_version)
-	rm -rf $(pkg)-$(dfsg_version)
+	dh_installchangelogs CHANGELOG.rst
diff -Nru a2jmidid-8~dfsg0/debian/watch a2jmidid-9/debian/watch
--- a2jmidid-8~dfsg0/debian/watch	2016-12-28 08:32:27.000000000 +0000
+++ a2jmidid-9/debian/watch	2019-10-13 15:58:33.000000000 +0000
@@ -1,3 +1,4 @@
 version=4
-opts=dversionmangle=s/~dfsg\d+$// \
-http://download.gna.org/a2jmidid/a2jmidid-(.*)@ARCHIVE_EXT@
+opts="filenamemangle=s%(?:.*?)?v?(\d[\d.]*)\.tar\.gz%a2jmidid-$1.tar.gz%" \
+https://github.com/linuxaudio/a2jmidid/tags \
+  (?:.*?/)?v?(\d[\d.]*)\.tar\.gz
diff -Nru a2jmidid-8~dfsg0/.gitignore a2jmidid-9/.gitignore
--- a2jmidid-8~dfsg0/.gitignore	1970-01-01 00:00:00.000000000 +0000
+++ a2jmidid-9/.gitignore	2019-09-23 22:34:12.000000000 +0000
@@ -0,0 +1,2 @@
+/TAGS
+/build
diff -Nru a2jmidid-8~dfsg0/gitversion.h a2jmidid-9/gitversion.h
--- a2jmidid-8~dfsg0/gitversion.h	2012-07-05 01:18:29.000000000 +0000
+++ a2jmidid-9/gitversion.h	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-#define GIT_VERSION "7383d268c4bfe85df9f10df6351677659211d1ca"
diff -Nru a2jmidid-8~dfsg0/gpl2.txt a2jmidid-9/gpl2.txt
--- a2jmidid-8~dfsg0/gpl2.txt	2008-09-16 08:05:10.000000000 +0000
+++ a2jmidid-9/gpl2.txt	1970-01-01 00:00:00.000000000 +0000
@@ -1,339 +0,0 @@
[339 lines of verbatim GPL-2 license text removed here; the identical text is re-added unchanged as LICENSE below]
diff -Nru a2jmidid-8~dfsg0/INSTALL a2jmidid-9/INSTALL
--- a2jmidid-8~dfsg0/INSTALL	2010-06-04 18:05:09.000000000 +0000
+++ a2jmidid-9/INSTALL	1970-01-01 00:00:00.000000000 +0000
@@ -1,34 +0,0 @@
-= Configure it =
-
-./waf configure
-
-This will configure for installation to /usr/local prefix.
-If you want to use other prefix, use --prefix option:
-
-./waf configure --prefix=/usr
-
-For full list of options, run:
-
-./waf configure --help
-
-There are two custom options:
-
- * "--disable-dbus will" force disable dbus support, even if dependencies are present
- * "--enable-pkg-config-dbus-service-dir" will force D-Bus service install
-   dir to be one returned by pkg-config. This is usually needed when
-   prefix is /usr/local because dbus daemon scans /usr for service
-   files but does not in /usr/local
-
-= Build it =
-
-./waf
-
-You can use -j option to enable building on more than one CPU:
-
-./waf -j 4
-
-= Install it =
-
-./waf install
-
-You probably want to run later as superuser to install system-wide
diff -Nru a2jmidid-8~dfsg0/INSTALLATION.rst a2jmidid-9/INSTALLATION.rst
--- a2jmidid-8~dfsg0/INSTALLATION.rst	1970-01-01 00:00:00.000000000 +0000
+++ a2jmidid-9/INSTALLATION.rst	2019-09-23 22:34:12.000000000 +0000
@@ -0,0 +1,48 @@
+============
+Installation
+============
+
+*a2jmidid* uses the |meson| build system.
+
+Configure and build
+-------------------
+
+To configure the project, |meson|'s |meson_universal_options| (e.g. *prefix*)
+can be used to prepare a build directory::
+
+  meson --prefix=/usr build
+
+One additional, project-specific option allows building without |dbus|
+support::
+
+  meson --prefix=/usr -Ddisable-dbus=true build
+
+To build the application, |ninja| is required::
+
+  ninja -C build
+
+Install
+-------
+
+|meson| is able to install the project components to the system directories
+(when run as root), while honoring the *DESTDIR* environment variable::
+
+  DESTDIR="/some/other/location" meson install -C build
+
+.. |meson| raw:: html
+
+  Meson
+
+.. |meson_universal_options| raw:: html
+
+  universal options
+
+.. |dbus| raw:: html
+
+  D-Bus
+
+.. |ninja| raw:: html
+
+  Ninja
+
diff -Nru a2jmidid-8~dfsg0/LICENSE a2jmidid-9/LICENSE
--- a2jmidid-8~dfsg0/LICENSE	1970-01-01 00:00:00.000000000 +0000
+++ a2jmidid-9/LICENSE	2019-09-23 22:34:12.000000000 +0000
@@ -0,0 +1,339 @@
[339 lines of verbatim GPL-2 license text added here, identical to the gpl2.txt content removed above; upstream renamed gpl2.txt to LICENSE]
We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. 
If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. 
+However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. 
Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+                            NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License along
+    with this program; if not, write to the Free Software Foundation, Inc.,
+    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff -Nru a2jmidid-8~dfsg0/man/a2jmidid.1 a2jmidid-9/man/a2jmidid.1
--- a2jmidid-8~dfsg0/man/a2jmidid.1	2011-08-03 13:03:45.000000000 +0000
+++ a2jmidid-9/man/a2jmidid.1	2019-09-23 22:34:12.000000000 +0000
@@ -19,13 +19,13 @@
 .SH NOTES
 ALSA does not guarantee client names to be unique. I.e. it is possible
 to have two apps that create two clients with the same ALSA client name.
-JACK however requires port names to be unqiue. To ensure this uniqueness,
+JACK however requires port names to be unique. To ensure this uniqueness,
 a2jmidid will add the unique numeric ALSA client ID to the JACK port name.
 However this behaviour is known to be problematic when restoring
 connections using simplistic tools like aj\-snapshot and jack_connect.
 In order to make them work, the -u option can be used. This option will
 cause a2jmidid to omit the numeric ALSA Client ID from JACK port names.
-In this mode, ALSA client name uniqueness must be guartanteed externally.
+In this mode, ALSA client name uniqueness must be guaranteed externally.
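The NOTES above rest on one invariant: JACK port names must be unique even though ALSA client names need not be. As a purely editorial illustration of that reasoning (the name format below is hypothetical, not a2jmidid's actual formatting code), appending the numeric ALSA client ID keeps generated names distinct, while the -u behaviour can collide unless uniqueness is guaranteed externally::

    # Illustrative sketch only; the name format here is made up.
    def jack_port_name(client_name, client_id, port, unique_ids=True):
        if unique_ids:
            # default behaviour: embed the numeric ALSA client ID
            return '{} [{}]: {}'.format(client_name, client_id, port)
        # -u behaviour: caller must guarantee client name uniqueness
        return '{}: {}'.format(client_name, port)

    # Two distinct ALSA clients that happen to share a name:
    assert jack_port_name('SomeSynth', 128, 'midi_in') != \
           jack_port_name('SomeSynth', 129, 'midi_in')
    # With -u style names the two ports would collide:
    assert jack_port_name('SomeSynth', 128, 'midi_in', unique_ids=False) == \
           jack_port_name('SomeSynth', 129, 'midi_in', unique_ids=False)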
.SH AUTHOR Eric Hedekar diff -Nru a2jmidid-8~dfsg0/meson.build a2jmidid-9/meson.build --- a2jmidid-8~dfsg0/meson.build 1970-01-01 00:00:00.000000000 +0000 +++ a2jmidid-9/meson.build 2019-09-23 22:34:12.000000000 +0000 @@ -0,0 +1,111 @@ +project( + 'a2jmidid', + 'c', + default_options: [ + 'prefix=/usr/local', + 'b_pie=true', + 'b_lto=true' + ], + meson_version: '>=0.50.0', + license: 'GPL2+', + version: '9') + +cc = meson.get_compiler('c') + +# dependency definitions +dep_jack = dependency('jack') +dep_alsa = dependency('alsa') +lib_dl = cc.find_library('dl') +lib_pthread = cc.find_library('pthread') +deps_a2jmidid = [dep_alsa, dep_jack, lib_dl, lib_pthread] + +# source definitions +src_a2jmidi_bridge = ['a2jmidi_bridge.c'] +src_j2amidi_bridge = ['j2amidi_bridge.c'] +src_a2jmidid = [ + 'a2jmidid.c', + 'log.c', + 'port.c', + 'port_thread.c', + 'port_hash.c', + 'paths.c', + #'conf.c', + 'jack.c', + 'list.c'] + +# config.h input +conf_data = configuration_data() +conf_data.set('version', meson.project_version()) + +git = find_program('git', required : false) +if git.found() + git_version = run_command('git', 'describe', '--long', '--tags') + if git_version.returncode() == 0 + conf_data.set('version', git_version.stdout().strip()) + endif +endif + +if get_option('disable-dbus') + conf_data.set10('dbus', false) +else + dep_dbus = dependency('dbus-1') + deps_a2jmidid += [dep_dbus] + dbus_data = configuration_data() + dbus_data.set('bindir', join_paths(get_option('prefix'), get_option('bindir'))) + dbus_data.set('dbus_service_dir', join_paths(get_option('prefix'), 'share', 'dbus-1', 'services')) + configure_file( + input: 'org.gna.home.a2jmidid.service.in', + output: 'org.gna.home.a2jmidid.service', + install: true, + install_dir: dbus_data.get('dbus_service_dir'), + configuration: dbus_data + ) + conf_data.set10('dbus', true) + src_a2jmidid += [ + 'dbus.c', + 'dbus_iface_introspectable.c', + 'dbus_iface_control.c', + 'sigsegv.c'] + install_man('man/a2j.1') + install_man('man/a2j_control.1') + configure_file( + input: 'a2j', + output: 'a2j', + copy: true, + install: true, + install_dir: dbus_data.get('bindir')) + configure_file( + input: 'a2j_control', + output: 'a2j_control', + copy: true, + install: true, + install_dir: dbus_data.get('bindir')) +endif + +config_header = configure_file( + input: 'config.h.in', + output: 'config.h', + configuration: conf_data) +src_a2jmidid += [config_header] + +# executables to compile +executable( + 'a2jmidi_bridge', + sources: src_a2jmidi_bridge, + dependencies: [dep_alsa, dep_jack], + install: true) +executable( + 'j2amidi_bridge', + sources: src_j2amidi_bridge, + dependencies: [dep_alsa, dep_jack], + install: true) +executable( + 'a2jmidid', + sources: src_a2jmidid, + dependencies: deps_a2jmidid, + install: true) + +# installing man pages +install_man('man/a2jmidi_bridge.1') +install_man('man/a2jmidid.1') +install_man('man/j2amidi_bridge.1') diff -Nru a2jmidid-8~dfsg0/meson_options.txt a2jmidid-9/meson_options.txt --- a2jmidid-8~dfsg0/meson_options.txt 1970-01-01 00:00:00.000000000 +0000 +++ a2jmidid-9/meson_options.txt 2019-09-23 22:34:12.000000000 +0000 @@ -0,0 +1 @@ +option('disable-dbus', type: 'boolean', value: false, description: 'Disable D-Bus support (default: false)') diff -Nru a2jmidid-8~dfsg0/NEWS a2jmidid-9/NEWS --- a2jmidid-8~dfsg0/NEWS 2012-07-05 01:16:42.000000000 +0000 +++ a2jmidid-9/NEWS 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -= Version 8 "Sophronius of Vratsa" on 2012-07-05 = - * -u commandline option - * D-Bus method for 
checking whether hw export is enabled. Kudos to Danni Coy - * Fix for resource leak. Kudos to Dan A. Muresan - * Improved error message for snd_seq_connect_to() failures - * --mandir= option in waf. Kudos to Dan Church - -= Version 7 "Paisius of Hilendar" on 2011-01-16 = - * MIDI processing improvements - * Use the JACK limit for max port name size (sr #2526) - * Adopt to shared library policy changes in some distros (sr #2547) - * dbus support can now be disabled at configure stage - * fix build on not so common platforms (LP: #556351) - * man pages (from Debian) - * reopen log file when it is deleted or replaced - -= Version 6 "Indzhe Voyvoda" on 2009-12-29 = - * MIDI processing improvements - * Handle large number of ports - * a2j script (non-dbus-like behaviour for dbus environment) - * Allow tweaking through dbus of the hardware port export option - * Fix a use of invalid memory - * Fix port miss that can occur if port appears during bridge startup - -= Version 5 "Athos" on 2009-06-13 = - * Fix thight loop on D-Bus disconnect - * D-Bus signals for bridge start and stop - * Fixed alsamidi "disappearing output" bug. (backport from jack1) - * MIDI note-off normalization fix from Dave Robillard (Backport from jack1) - * Removed wrong assert from alsa_seqmidi.c reported by Ken Ellinwood (Backport from jack1) - * Mark anything that looks like a hardware port as physical&terminal (Backport from jack1/jack2) - * Fix potential crash when D-Bus is not used - * Support for multiple ALSA clients with same name - * Merge midibridge changeset by Paul Davis that is expected to fix - midi event timing problems that some people have reported. - -= Version 4 "Devsirme" on 2008-08-03 = - * Fix typos in docs - * Disable use of C++ demangling in sigsegv. Fix for sr #2074 - * Fix a2j_control help text (thanks kfoltman!) - * Request fixed JACK client name. Fix for bug #12139 - * Handle missing svnversion executable nicely. Fixes bug #12138 - -= Version 3 "Bodrum" on 2008-08-03 = - * Improved port naming, support for bidirectional ports - * Allow exporting of hardware ports (disabled by default) - * Switch from autotools to waf - * Remove support for old JACK MIDI API variants - * Remove usage of posix semaphores that could cause sleep in realtime context, in rare circumstances - * D-Bus background service mode. The old manual mode is still working. - * Log file when running in background service mode. - * Improved documentation - * Import, with slight modifications, static bridges created by Sean Bolton and Lars Luthman. - -= Version 2 on 2007-10-27 = - * Improved build system (autotools) and support for older JACK variants - -= Version 1 on 2007-08-26 = - * Initial release diff -Nru a2jmidid-8~dfsg0/org.gna.home.a2jmidid.service.in a2jmidid-9/org.gna.home.a2jmidid.service.in --- a2jmidid-8~dfsg0/org.gna.home.a2jmidid.service.in 2008-09-16 08:05:10.000000000 +0000 +++ a2jmidid-9/org.gna.home.a2jmidid.service.in 2019-09-23 22:34:12.000000000 +0000 @@ -1,3 +1,3 @@ [D-BUS Service] Name=org.gna.home.a2jmidid -Exec=@BINDIR@/a2jmidid dbus +Exec=@bindir@/a2jmidid dbus diff -Nru a2jmidid-8~dfsg0/README a2jmidid-9/README --- a2jmidid-8~dfsg0/README 2008-09-16 08:05:10.000000000 +0000 +++ a2jmidid-9/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ -= Overview = -Main goal of this project main is to ease usage of legacy, not -JACK-ified apps, in a JACK MIDI enabled system. - -There are two ways to use legacy ALSA sequencer applications in JACK -MIDI system. 
- -The first approach is to use automatic bridging. For every ALSA -sequencer port you get one JACK MIDI port. If ALSA sequencer port is -both input and output one, you get two JACK MIDI ports, one input and -output. - -The second approach is to static bridges. You start application that -creates one ALSA sequencer port and one JACK MIDI port. Such bridge is -unidirectional. - -First approach works almost always except when legacy ALSA sequencer -application does not create ports and/or wants existing port to exist -so it can capture from or playback to it. Such programs are using the -feature of ALSA sequencer framework that allows sending and receiving -MIDI events to/from port, without creating connection to it. - -= a2jmidid = -a2jmidid is daemon that implements automatic bridging. For every ALSA -sequencer port you get one jack midi port. If ALSA sequencer port is -both input and output one, you get two JACK MIDI ports, one input and -output. - -a2jmidid has two modes of operation, running bridge manually and -background D-Bus service. - -To start a2jmidid in manual mode, just run it (and don't supply the -special "undocumented" dbus parameter). a2jmidid will start bridging, -and you will get output on stdout and stderr. You can stop the bridge -using ctrl-c. - -Usually you want to bridge software ports and not bridge hardware -ports (they are handled by JACK itself). In case you want to force -a2jmidid to bridge hardware ports, you can use the export-hw option: - -a2jmidid -e - -or - -a2jmidid --export-hw - -In D-Bus service mode, a2jmidid works in background. When service -access is requested by some application (like a2j_control), dbus -session bus daemon activates the object by executing the service -executable. The object has methods for starting and stopping the -bridging. You can use a2j_control to do this: - -a2j_control start -a2j_control stop - -You can deactivate (that may cause later reactivation) the service -like this: - -a2j_control exit - -You can query the bridge status using this command: - -a2j_control status - -There also methods (and corresponding a2j_control commands) that can -be used to query mapping information. - -a2jmidid implementation is based on jack-alsamidi-0.5 that is [almost] -same as jackd ALSA "seq" MIDI backend), both created by Dmitry -Baikov. - -= a2jmidi_bridge = -a2jmidi_bridge is static bridge that creates one ALSA sequencer -playback port and one JACK MIDI capture port. MIDI events sent to ALSA -sequencer playback port can be read from the JACK MIDI capture port. - -a2jmidi_bridge has optional argument that allows overriding name used -for JACK and ALSA client: - -a2jmidi_bridge "my precious bridge" - -a2jmidi_bridge implementation is based on alsaseq2jackmidi by Sean -Bolton. - -= j2amidi_bridge = -j2amidi_bridge is static bridge that creates one JACK MIDI playback -port and one ALSA sequencer capture port. MIDI events sent to JACK -MIDI playback port can be read from the ALSA sequencer capture port. - -j2amidi_bridge has optional argument that allows overriding name used -for JACK and ALSA client: - -j2amidi_bridge "my precious bridge" - -j2amidi_bridge implementation is based on jackmidi2alsaseq by Lars -Luthman. - -= Contact info = -If someone wants to contribute please, contact me (Nedko Arnaudov), or -send patches, or request inclusion (Gna! a2jmidid project). - -Packagers are more than welcome too. 
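To make the D-Bus service mode described in the README above concrete, here is a minimal python-dbus sketch of what a client such as a2j_control does. The bus name and object path follow from the org.gna.home.a2jmidid.service file shown earlier; the '.control' interface suffix and the method names are assumptions inferred from the a2j_control commands, so treat this as a sketch rather than a definitive client::

    import dbus

    # Assumptions: the '.control' interface suffix and the start/stop/
    # is_started method names; the bus name comes from the .service file.
    bus = dbus.SessionBus()
    a2j = bus.get_object('org.gna.home.a2jmidid', '/')
    control = dbus.Interface(a2j, 'org.gna.home.a2jmidid.control')

    control.start()                    # what `a2j_control start` triggers
    print(bool(control.is_started()))  # what `a2j_control status` reports
    control.stop()

Because the call goes through the session bus, the first method invocation can activate the service on demand, matching the activation behaviour the README describes.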
diff -Nru a2jmidid-8~dfsg0/README.rst a2jmidid-9/README.rst
--- a2jmidid-8~dfsg0/README.rst	1970-01-01 00:00:00.000000000 +0000
+++ a2jmidid-9/README.rst	2019-09-23 22:34:12.000000000 +0000
@@ -0,0 +1,128 @@
+========
+a2jmidid
+========
+
+This project aims to ease the use of legacy, non |jack| enabled applications
+in a |jack| MIDI enabled system, when using |jack2|.
+
+There are two ways to use legacy |alsa| sequencer applications in a |jack| MIDI
+system:
+
+**Automatic bridging**: For every |alsa| sequencer port you get one |jack| MIDI
+port. If an |alsa| sequencer port is both an input and an output, you get two
+|jack| MIDI ports, one input and one output.
+(*This approach works almost always except when the legacy ALSA sequencer
+application does not create ports and/or wants to use an existing port to
+capture from or playback to. Such programs are using a feature of the |alsa|
+sequencer framework that allows sending and receiving MIDI events to/from a
+port, without creating a connection to it.*)
+
+**Static bridges**: You start an application that creates one |alsa| sequencer
+port and one |jack| MIDI port. Such a bridge is unidirectional.
+
+For details on how to build and install this project, look at `INSTALLATION.rst
+<INSTALLATION.rst>`_.
+
+a2jmidid
+--------
+
+a2jmidid is a daemon that implements **automatic bridging**.
+
+It has two modes of operation: running a bridge manually or running it as a
+background |dbus| service.
+
+Start daemon
+____________
+
+To start *a2jmidid* in manual mode, just run the executable.
+*a2jmidid* will start bridging, and you will get output on stdout and stderr.
+You can stop the bridge using *ctrl-c*.
+
+Usually you want to bridge software ports and not bridge hardware
+ports (they are handled by |jack| itself). In case you want to force
+*a2jmidid* to bridge hardware ports nonetheless, you can use one of the
+following flags::
+
+    a2jmidid -e
+
+or::
+
+    a2jmidid --export-hw
+
+Start D-Bus service
+___________________
+
+In D-Bus service mode, a2jmidid works in the background. When service access is
+requested by some application (such as *a2j_control*), the |dbus| session bus
+daemon activates the object by executing the service executable.
+
+The object has methods for starting and stopping the
+bridging. You can use *a2j_control* to do this::
+
+    a2j_control --start
+    a2j_control --stop
+
+You can deactivate the service (later access may reactivate it)
+like this::
+
+    a2j_control --exit
+
+You can query the bridge status using this command::
+
+    a2j_control --status
+
+There are also methods (and corresponding a2j_control commands) that can
+be used to query mapping information::
+
+    a2j_control --help
+
+The *a2jmidid* implementation is based on *jack-alsamidi-0.5*, which is
+(almost) identical to the jackd |alsa| *seq* MIDI backend, both created by
+Dmitry Baikov.
+
+a2jmidi_bridge
+--------------
+
+*a2jmidi_bridge* creates a **static bridge** between one |alsa| sequencer
+playback port and one |jack| MIDI capture port. MIDI events sent to the |alsa|
+sequencer playback port can be read from the |jack| MIDI capture port.
+
+*a2jmidi_bridge* has an optional argument that allows overriding the name used
+for the |jack| and |alsa| client::
+
+    a2jmidi_bridge "my precious bridge"
+
+The *a2jmidi_bridge* implementation is based on *alsaseq2jackmidi* by Sean
+Bolton.
+
+j2amidi_bridge
+--------------
+
+*j2amidi_bridge* creates a **static bridge** between one |jack| MIDI playback
+port and one |alsa| sequencer capture port.
MIDI events sent to |jack| +MIDI playback port can be read from the |alsa| sequencer capture port. + +*j2amidi_bridge* has an optional argument that allows overriding the name used +for the |jack| and |alsa| client:: + + j2amidi_bridge "my precious bridge" + +The *j2amidi_bridge* implementation is based on jackmidi2alsaseq by Lars +Luthman. + +.. |jack| raw:: html + + JACK + +.. |jack2| raw:: html + + jack2 + +.. |dbus| raw:: html + + D-Bus + +.. |alsa| raw:: html + + ALSA + diff -Nru a2jmidid-8~dfsg0/sigsegv.c a2jmidid-9/sigsegv.c --- a2jmidid-8~dfsg0/sigsegv.c 2010-08-20 15:08:15.000000000 +0000 +++ a2jmidid-9/sigsegv.c 2019-09-23 22:34:12.000000000 +0000 @@ -91,18 +91,20 @@ a2j_error("info.si_errno = %d", info->si_errno); a2j_error("info.si_code = %d (%s)", info->si_code, si_codes[info->si_code]); a2j_error("info.si_addr = %p", info->si_addr); -#if !defined(__alpha__) && !defined(__ia64__) && !defined(__FreeBSD_kernel__) && !defined(__arm__) && !defined(__hppa__) && !defined(__sh__) +#if !defined(__alpha__) && !defined(__ia64__) && !defined(__FreeBSD_kernel__) && !defined(__arm__) && !defined(__hppa__) && !defined(__sh__) && !defined(__aarch64__) for(i = 0; i < NGREG; i++) a2j_error("reg[%02d] = 0x" REGFORMAT, i, -#if defined(__powerpc__) +#if defined(__powerpc__) && !defined(__powerpc64__) ucontext->uc_mcontext.uc_regs[i] +#elif defined(__powerpc64__) + ucontext->uc_mcontext.gp_regs[i] #elif defined(__sparc__) && defined(__arch64__) ucontext->uc_mcontext.mc_gregs[i] #else ucontext->uc_mcontext.gregs[i] #endif ); -#endif /* alpha, ia64, kFreeBSD, arm, hppa */ +#endif /* alpha, ia64, kFreeBSD, arm, hppa, aarch64 */ #if defined(SIGSEGV_STACK_X86) || defined(SIGSEGV_STACK_IA64) # if defined(SIGSEGV_STACK_IA64) diff -Nru a2jmidid-8~dfsg0/waf a2jmidid-9/waf --- a2jmidid-8~dfsg0/waf 2012-08-02 11:07:21.000000000 +0000 +++ a2jmidid-9/waf 1970-01-01 00:00:00.000000000 +0000 @@ -1,142 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 - -""" -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. The name of the author may not be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR -IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, -INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, -STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. 
-""" - -import os, sys -if sys.hexversion<0x203000f: raise "Waf requires Python >= 2.3" - -if 'PSYCOWAF' in os.environ: - try:import psyco;psyco.full() - except:pass - -VERSION="1.4.3" -REVISION="4cc0bec64a165ffe5dd3eed60cd2e01b" -INSTALL=sys.platform=='win32' and 'c:/temp' or '/usr/local' -cwd = os.getcwd() -join = os.path.join - -def decode(s): - p1 = len(s) - s += '!!!!!' - w1 = [256**(3-u) for u in xrange(4)] - w2 = [(u, 85**(4-u)) for u in xrange(5)] - tot = [sum([(ord(s[i+m])-33) * n for (m, n) in w2]) for i in xrange(0, p1, 5)] - return ''.join([chr((y/x) & 255) for y in tot for x in w1]) - -def err(m): - print '\033[91mError: %s\033[0m' % m - sys.exit(1) - -def unpack_wafdir(dir): - f = open(sys.argv[0],'rb') - c = "corrupted waf (%d)" - while 1: - line = f.readline() - if not line: err("run waf-light from a folder containing wafadmin") - if line == '#==>\n': - txt = f.readline() - if not txt: err(c % 1) - if f.readline()!='#<==\n': err(c % 2) - break - if not txt: err(c % 3) - try: txt = decode(txt[1:-1].replace('z', '!!!!!')) - except: err(c % 4) - - import shutil, tarfile - try: shutil.rmtree(dir) - except OSError: pass - try: os.makedirs(join(dir, 'wafadmin', 'Tools')) - except OSError: err("Cannot unpack waf lib into %s\nMove waf into a writeable directory" % dir) - - os.chdir(dir) - tmp = 't.tbz2' - t = open(tmp,'wb') - t.write(txt) - t.close() - - t = tarfile.open(tmp) - for x in t: t.extract(x) - t.close() - - os.chmod(join('wafadmin','Tools'), 0755) - - os.unlink(tmp) - os.chdir(cwd) - -def test(dir): - try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir) - except OSError: pass - -def find_lib(): - name = sys.argv[0] - base = os.path.dirname(os.path.abspath(name)) - - #devs use $WAFDIR - w=test(os.environ.get('WAFDIR', '')) - if w: return w - - #waf-light - if name.endswith('waf-light'): - w = test(base) - if w: return w - err("waf-light requires wafadmin -> export WAFDIR=/folder") - - dir = "/lib/waf-%s-%s/" % (VERSION, REVISION) - for i in [INSTALL,'/usr','/usr/local','/opt']: - w = test(i+dir) - if w: return w - - #waf-local - s = '.waf-%s-%s' - if sys.platform == 'win32': s = s[1:] - dir = join(base, s % (VERSION, REVISION)) - w = test(dir) - if w: return w - - #unpack - unpack_wafdir(dir) - return dir - -wafdir = find_lib() -if "-vv" in sys.argv: print "wafdir is %s" % wafdir - -w = join(wafdir, 'wafadmin') -t = join(w, 'Tools') -sys.path = [w, t] + sys.path - -import Scripting, Params -Params.g_tooldir = [t] -Params.g_cwd_launch = cwd - -if Params.g_version != VERSION: - err('Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (VERSION, Params.g_version, wafdir)) -Scripting.prepare() - diff -Nru a2jmidid-8~dfsg0/wafadmin/Action.py a2jmidid-9/wafadmin/Action.py --- a2jmidid-8~dfsg0/wafadmin/Action.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Action.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -""" -Actions (Scons design) were used to separate the code to execute from the Task objects -This had no clear justification (extra level of indirection when a factory -is just fine) and is removed in Waf 1.5 -""" - -#import warnings -#warnings.warn("The WAF module 'Action' is being deprecated! 
:-)", DeprecationWarning, stacklevel=2) -#del warnings - -import Task -Action = Task.task_type_from_func -simple_action = Task.simple_task_type - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Build.py a2jmidid-9/wafadmin/Build.py --- a2jmidid-8~dfsg0/wafadmin/Build.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Build.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,601 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005 (ita) - -""" -Dependency tree holder - -The class Build holds all the info related to a build: -* file system representation (tree of Node instances) -* various cached objects (task signatures, file scan results, ..) - -There is only one Build object at a time (Params.g_build singleton) -""" - -import os, sys, cPickle, types, imp, errno, re, glob -import Params, Runner, TaskGen, Node, Scripting, Utils, Environment, Task -from Params import debug, error, fatal, warning -from Constants import * - -SAVED_ATTRS = 'm_root m_srcnode m_bldnode m_tstamp_variants node_deps raw_deps bld_sigs id_nodes'.split() -"Build class members to save" - -g_modcache = {} -"Cache for the tools (modules), re-importing raises errors" - -class BuildError(Exception): - def __init__(self, b=None, t=[]): - self.bld = b - self.tasks = t - self.ret = 1 - def get_message(self): - lst = ['Build failed'] - for tsk in self.tasks: - if tsk.m_hasrun == CRASHED: - try: - lst.append(" -> task failed (err #%d): %s" % (tsk.err_code, str(tsk.m_outputs))) - except AttributeError: - lst.append(" -> task failed:" % str(tsk.m_outputs)) - elif tsk.m_hasrun == MISSING: - lst.append(" -> missing files: %s" % str(tsk.m_outputs)) - return '\n'.join(lst) - -class BuildDTO(object): - "holds the data to store using cPickle" - def __init__(self): - pass - def init(self, bdobj): - global SAVED_ATTRS - for a in SAVED_ATTRS: - setattr(self, a, getattr(bdobj, a)) - def update_build(self, bdobj): - global SAVED_ATTRS - for a in SAVED_ATTRS: - setattr(bdobj, a, getattr(self, a)) - -class Build(object): - "holds the dependency tree" - def __init__(self): - - # there should be only one build dir in use at a time - Params.g_build = self - - # instead of hashing the nodes, we assign them a unique id when they are created - self.id_nodes = 0 - - # initialize the filesystem representation - self._init_data() - - # map names to environments, the 'default' must be defined - self.m_allenvs = {} - - # ======================================= # - # code for reading the scripts - - # project build directory - do not reset() from load_dirs() or _init_data() - self.m_bdir = '' - - # the current directory from which the code is run - # the folder changes everytime a wscript is read - self.path = None - - # temporary holding the subdirectories containing scripts - look in Scripting.py - self.m_subdirs = [] - - # ======================================= # - # cache variables - - # local cache for absolute paths - m_abspath_cache[variant][node] - self.m_abspath_cache = {} - - # list of folders that are already scanned - # so that we do not need to stat them one more time - self.m_scanned_folders = {} - - # list of targets to uninstall for removing the empty folders after uninstalling - self.m_uninstall = [] - - # ======================================= # - # tasks and objects - - # build dir variants (release, debug, ..) 
- for v in 'm_tstamp_variants node_deps bld_sigs raw_deps m_abspath_cache'.split(): - var = {} - setattr(self, v, var) - - self.cache_dir_contents = {} - - def _init_data(self): - debug("init data called", 'build') - - # filesystem root - root name is Params.g_rootname - self.m_root = Node.Node('', None, Node.DIR) - - self.m_srcnode = None # src directory - self.m_bldnode = None # bld directory - - self.task_manager = Task.TaskManager() - - # load existing data structures from the disk (stored using self._store()) - def _load(self): - cachedir = Params.g_cachedir - code = '' - try: - file = open(os.path.join(cachedir, 'build.config.py'), 'r') - code = file.read() - file.close() - except (IOError, OSError): - # TODO load the pickled file and the environments better - pass - else: - re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M) - for m in re_imp.finditer(code): - g = m.group - if g(2) == 'version': - if eval(g(3)) < HEXVERSION: - Params.fatal('Version mismatch! reconfigure the project') - elif g(2) == 'tools': - lst = eval(g(3)) - for t in lst: - self.setup(**t) - - try: - file = open(os.path.join(self.m_bdir, DBFILE), 'rb') - dto = cPickle.load(file) - dto.update_build(self) - file.close() - except IOError: - debug("resetting the build object (dto failed)", 'build') - self._init_data() - - # store the data structures on disk, retrieve with self._load() - def _store(self): - file = open(os.path.join(self.m_bdir, DBFILE), 'wb') - dto = BuildDTO() - dto.init(self) - cPickle.dump(dto, file, -1) # remove the '-1' for unoptimized version - file.close() - - # ======================================= # - - def save(self): - self._store() - - def clean(self): - debug("clean called", 'build') - # FIXME this will not work for files created during the configuration dep_files - def clean_rec(node): - for x in node.childs.keys(): - nd = node.childs[x] - - tp = nd.id & 3 - if tp == Node.DIR: - clean_rec(nd) - elif tp == Node.BUILD: - for env in self.m_allenvs.values(): - pt = nd.abspath(env) - if pt in env['waf_config_files']: continue - try: os.remove(pt) - except OSError: pass - node.childs.__delitem__(x) - clean_rec(self.m_srcnode) - - for v in 'm_tstamp_variants node_deps bld_sigs raw_deps m_abspath_cache'.split(): - var = {} - setattr(self, v, var) - - def compile(self): - debug("compile called", 'build') - - os.chdir(self.m_bdir) - - - """ - import cProfile, pstats - cProfile.run("import TaskGen; TaskGen.flush()", 'profi.txt') - p = pstats.Stats('profi.txt') - p.sort_stats('cumulative').print_stats(80) - """ - TaskGen.flush() - #""" - - if Params.g_verbose>2: self.dump() - - self.generator = Runner.get_instance(self, Params.g_options.jobs) - - def dw(on=True): - if Params.g_options.progress_bar: - if on: sys.stdout.write(Params.g_cursor_on) - else: sys.stdout.write(Params.g_cursor_off) - - debug('executor starting', 'build') - try: - dw(on=False) - ret = self.generator.start() - except KeyboardInterrupt, e: - dw() - os.chdir(self.m_srcnode.abspath()) - self._store() - Params.pprint('RED', 'Build interrupted') - if Params.g_verbose > 1: raise - else: sys.exit(68) - except Exception, e: - dw() - # do not store anything, for something bad happened - raise - else: - dw() - self._store() - - if ret: - os.chdir(self.m_srcnode.abspath()) - Utils.test_full() - raise BuildError(self, self.task_manager.tasks_done) - - if Params.g_verbose > 2: self.dump() - os.chdir(self.m_srcnode.abspath()) - - def install(self): - "this function is called for both install and uninstall" - debug('install called', 
'build') - - TaskGen.flush() - for obj in TaskGen.g_allobjs: - if obj.m_posted: obj.install() - - # remove empty folders after uninstalling - if Params.g_commands['uninstall']: - lst = [] - for x in self.m_uninstall: - dir = os.path.dirname(x) - if not dir in lst: lst.append(dir) - lst.sort() - lst.reverse() - - nlst = [] - for y in lst: - x = y - while len(x) > 4: - if not x in nlst: nlst.append(x) - x = os.path.dirname(x) - - nlst.sort() - nlst.reverse() - for x in nlst: - try: os.rmdir(x) - except OSError: pass - - def add_subdirs(self, dirs): - for dir in Utils.to_list(dirs): - if dir: Scripting.add_subdir(dir, self) - - def create_obj(self, *k, **kw): - cls_name = k[0] - try: cls = TaskGen.task_gen.classes[cls_name] - except KeyError: raise KeyError('%s is not a valid build tool -> %s' % (cls_name, [x for x in TaskGen.task_gen.classes])) - else: return cls(*k, **kw) - - def load_envs(self): - cachedir = Params.g_cachedir - try: - lst = Utils.listdir(cachedir) - except OSError, e: - if e.errno == errno.ENOENT: - fatal('The project was not configured: run "waf configure" first!') - else: - # TODO: deal with network error and other OS errors. - raise - - if not lst: - fatal('The cache directory is empty: reconfigure the project') - - for file in lst: - if file.endswith(CACHE_SUFFIX): - env = Environment.Environment() - env.load(os.path.join(cachedir, file)) - name = file.split('.')[0] - - self.m_allenvs[name] = env - - self.init_variants() - - for env in self.m_allenvs.values(): - for f in env['dep_files']: - newnode = self.m_srcnode.find_or_declare(f) - try: - hash = Params.h_file(newnode.abspath(env)) - except (IOError, AttributeError): - error("cannot find "+f) - hash = SIG_NIL - self.m_tstamp_variants[env.variant()][newnode.id] = hash - - def setup(self, tool, tooldir=None, funs=None): - "setup tools for build process" - if type(tool) is types.ListType: - for i in tool: self.setup(i, tooldir) - return - - if not tooldir: tooldir = Params.g_tooldir - - file = None - key = str((tool, tooldir)) - module = g_modcache.get(key, None) - if not module: - file,name,desc = imp.find_module(tool, tooldir) - module = imp.load_module(tool,file,name,desc) - g_modcache[key] = module - if hasattr(module, "setup"): module.setup(self) - if file: file.close() - - def init_variants(self): - debug("init variants", 'build') - - lstvariants = [] - for env in self.m_allenvs.values(): - if not env.variant() in lstvariants: - lstvariants.append(env.variant()) - self._variants = lstvariants - - debug("list of variants is "+str(lstvariants), 'build') - - for name in lstvariants+[0]: - for v in 'm_tstamp_variants node_deps raw_deps m_abspath_cache'.split(): - var = getattr(self, v) - if not name in var: - var[name] = {} - - # ======================================= # - # node and folder handling - - # this should be the main entry point - def load_dirs(self, srcdir, blddir, isconfigure=None): - "this functions should be the start of everything" - - # there is no reason to bypass this check - try: - if srcdir == blddir or os.path.abspath(srcdir) == os.path.abspath(blddir): - fatal("build dir must be different from srcdir ->"+str(srcdir)+" ->"+str(blddir)) - except OSError: - pass - - # set the source directory - if not os.path.isabs(srcdir): - srcdir = os.path.join(os.path.abspath('.'),srcdir) - - # set the build directory it is a path, not a node (either absolute or relative) - if not os.path.isabs(blddir): - self.m_bdir = os.path.abspath(blddir) - else: - self.m_bdir = blddir - - if not isconfigure: - 
self._load() - if self.m_srcnode: - self.path = self.m_srcnode - return - - self.m_srcnode = self.m_root.ensure_dir_node_from_path(srcdir) - debug("srcnode is %s and srcdir %s" % (str(self.m_srcnode.m_name), srcdir), 'build') - - self.path = self.m_srcnode - - self.m_bldnode = self.m_root.ensure_dir_node_from_path(self.m_bdir) - - # create this build dir if necessary - try: os.makedirs(blddir) - except OSError: pass - - self.init_variants() - - def rescan(self, src_dir_node): - """ first list the files in the src dir and update the nodes - - for each variant build dir (multiple build dirs): - - list the files in the build dir, update the nodes - - this makes (n bdirs)+srdir to scan (at least 2 folders) - so we might want to do it in parallel in some future - """ - - # FIXME use sets with intersection and union - # do not rescan over and over again - if self.m_scanned_folders.get(src_dir_node.id, None): return - self.m_scanned_folders[src_dir_node.id] = 1 - - #debug("rescanning "+str(src_dir_node), 'build') - - # TODO undocumented hook - if hasattr(self, 'repository'): self.repository(src_dir_node) - - # list the files in the build dirs - # remove the existing timestamps if the build files are removed - if sys.platform == "win32" and not src_dir_node.m_name: - return - self.scan_src_path(src_dir_node, src_dir_node.abspath()) - - # first obtain the differences between srcnode and src_dir_node - #lst = self.m_srcnode.difflst(src_dir_node) - h1 = self.m_srcnode.height() - h2 = src_dir_node.height() - - lst = [] - child = src_dir_node - while h2 > h1: - lst.append(child.m_name) - child = child.m_parent - h2 -= 1 - lst.reverse() - - for variant in self._variants: - sub_path = os.path.join(self.m_bldnode.abspath(), variant , *lst) - try: - self.scan_path(src_dir_node, sub_path, variant) - except OSError: - #debug("osError on " + sub_path, 'build') - # listdir failed, remove all sigs of nodes - # TODO more things to remove? - dict = self.m_tstamp_variants[variant] - for node in src_dir_node.childs.values(): - if node.id in dict: - dict.__delitem__(node.id) - - # avoid deleting the build dir node - if node.id != Params.g_build.m_bldnode.id: - src_dir_node.childs.__delitem__(node.m_name) - os.makedirs(sub_path) - - # ======================================= # - def scan_src_path(self, i_parent_node, i_path): - """ - @param i_parent_node [Node]: parent node of path to scan. - @param i_path [string]: path to folder to scan. 
- @param i_existing_nodes: nodes already scanned ?""" - - listed_files = set(Utils.listdir(i_path)) - - self.cache_dir_contents[i_parent_node.id] = listed_files - debug("folder contents "+str(listed_files), 'build') - - node_names = set([x.m_name for x in i_parent_node.childs.values() if x.id & 3 == Node.FILE]) - cache = self.m_tstamp_variants[0] - - # nodes to keep - to_keep = listed_files & node_names - for x in to_keep: - node = i_parent_node.childs[x] - try: - # do not call node.abspath here - cache[node.id] = Params.h_file(i_path + os.sep + node.m_name) - except IOError: - fatal("a file is readonly or has become a dir "+node.abspath()) - - # remove both nodes and signatures - to_remove = node_names - listed_files - if to_remove: - # infrequent scenario - cache = self.m_tstamp_variants[0] - for name in to_remove: - nd = i_parent_node.childs[name] - if nd.id in cache: - cache.__delitem__(nd.id) - i_parent_node.childs.__delitem__(name) - - def scan_path(self, i_parent_node, i_path, i_variant): - """in this function we do not add timestamps but we remove them - when the files no longer exist (file removed in the build dir)""" - - i_existing_nodes = [x for x in i_parent_node.childs.values() if x.id & 3 == Node.BUILD] - - listed_files = set(Utils.listdir(i_path)) - node_names = set([x.m_name for x in i_existing_nodes]) - remove_names = node_names - listed_files - - # remove the stamps of the build nodes that no longer exist on the filesystem - ids_to_remove = [x.id for x in i_existing_nodes if x.m_name in remove_names] - cache = self.m_tstamp_variants[i_variant] - for nid in ids_to_remove: - if nid in cache: - cache.__delitem__(nid) - - def dump(self): - "for debugging" - def recu(node, count): - accu = count * '-' - accu += "> %s (d) %d \n" % (node.m_name, node.id) - - for child in node.childs.values(): - tp = child.get_type() - if tp == Node.FILE: - accu += count * '-' - accu += '-> '+child.m_name+' ' - - for variant in self.m_tstamp_variants: - var = self.m_tstamp_variants[variant] - if child.id in var: - accu += ' [%s,%s] ' % (str(variant), Params.view_sig(var[child.id])) - accu += str(child.id) - - accu+='\n' - elif tp == Node.BUILD: - accu+= count * '-' - accu+= '-> '+child.m_name+' (b) ' - - for variant in self.m_tstamp_variants: - var = self.m_tstamp_variants[variant] - if child.id in var: - accu+=' [%s,%s] ' % (str(variant), Params.view_sig(var[child.id])) - accu += str(child.id) - - accu+='\n' - elif tp == Node.DIR: - accu += recu(child, count+1) - return accu - - Params.pprint('CYAN', recu(self.m_root, 0) ) - Params.pprint('CYAN', 'size is '+str(self.m_root.size_subtree())) - - def env_of_name(self, name): - if not name: - error('env_of_name called with no name!') - return None - try: - return self.m_allenvs[name] - except KeyError: - error('no such environment: '+name) - return None - - def env(self, name='default'): - return self.env_of_name(name) - - def add_group(self, name=''): - TaskGen.flush(all=0) - self.task_manager.add_group(name) - - def add_manual_dependency(self, path, value): - h = getattr(self, 'deps_man', {}) - if os.path.isabs(path): - node = self.m_root.find_resource(path) - else: - node = self.path.find_resource(path) - h[node] = value - self.deps_man = h - - def set_sig_cache(self, key, val): - self.bld_sigs[key] = val - - def get_sig_cache(self, key): - try: - return self.bld_sigs[key] - except KeyError: - s = SIG_NIL - return (s, s, s, s, s) - - def launch_node(self): - try: - return self._launch_node - except AttributeError: - self._launch_node = 
self.m_root.find_dir(Params.g_cwd_launch) - return self._launch_node - - def glob(self, pattern, relative=True): - "files matching the pattern, seen from the current folder" - path = Params.g_build.path.abspath() - files = [self.m_root.find_resource(x) for x in glob.glob(path+os.sep+pattern)] - if relative: - files = [x.relpath(self.path) for x in files if x] - else: - files = [x.abspath() for x in files if x] - return files - - # backward compatibility - def get_curdir(self): - return self.path - def set_curdir(self, val): - Params.fatal("the current path cannot be set") - m_curdirnode = property(get_curdir, set_curdir) - - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Common.py a2jmidid-9/wafadmin/Common.py --- a2jmidid-8~dfsg0/wafadmin/Common.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Common.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,197 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -""" -Important functions: install_files, install_as, symlink_as (destdir is taken into account) -if the variable is not set (eval to false), installation is cancelled -if the variable is set but it does not exist, it assumes an absolute path was given -""" - -import os, types, shutil, glob -import Params, Utils -from Params import error, fatal -from Constants import * - -class InstallError(Exception): - pass - -def check_dir(dir): - #print "check dir ", dir - try: - os.stat(dir) - except OSError: - try: - os.makedirs(dir) - except OSError: - fatal("Cannot create folder " + dir) - -def do_install(src, tgt, chmod=0644): - """returns true if the file was effectively installed or uninstalled, false otherwise""" - if Params.g_commands['install']: - # check if the file is already there to avoid a copy - _do_install = True - if not Params.g_options.force: - try: - t1 = os.stat(tgt).st_mtime - t2 = os.stat(src).st_mtime - if t1 >= t2: _do_install = False - except OSError: - _do_install = True - - if _do_install: - srclbl = src - try: - srclbl = src.replace(Params.g_build.m_bldnode.abspath(None)+os.sep, '') - srclbl = src.replace(Params.g_build.m_srcnode.abspath(None)+os.sep, '') - except OSError: - pass - print "* installing %s as %s" % (srclbl, tgt) - - # followig is for shared libs and stale inodes - try: os.remove(tgt) - except OSError: pass - try: - shutil.copy2(src, tgt) - os.chmod(tgt, chmod) - except IOError: - try: - os.stat(src) - except IOError: - error('file %s does not exist' % str(src)) - fatal('Could not install the file %s' % str(tgt)) - return _do_install - elif Params.g_commands['uninstall']: - print "* uninstalling %s" % tgt - - Params.g_build.m_uninstall.append(tgt) - - try: os.remove(tgt) - except OSError: pass - return True - -def path_install(var, subdir, env=None): - bld = Params.g_build - if not env: env = Params.g_build.env() - destpath = env[var] - if not destpath: - error("Installing: to set a destination folder use env['%s']" % (var)) - destpath = var - destdir = env.get_destdir() - if destdir: destpath = os.path.join(destdir, destpath.lstrip(os.sep)) - if subdir: destpath = os.path.join(destpath, subdir.lstrip(os.sep)) - - return destpath - -def install_files(var, subdir, files, env=None, chmod=0644): - if not Params.g_install: return [] - if not var: return [] - - bld = Params.g_build - - if not env: env = bld.env() - destpath = env[var] - if not destpath: destpath = var # absolute paths - - node = bld.path - if type(files) is types.StringType: - if '*' in files: - gl = 
node.abspath()+os.sep+files - lst = glob.glob(gl) - else: - lst = files.split() - else: lst = files - - destdir = env.get_destdir() - if destdir: destpath = os.path.join(destdir, destpath.lstrip(os.sep)) - if subdir: destpath = os.path.join(destpath, subdir.lstrip(os.sep)) - - check_dir(destpath) - - # copy the files to the final destination - installed_files = [] - for filename in lst: - if not os.path.isabs(filename): - alst = Utils.split_path(filename) - filenode = node.find_resource_lst(alst) - if filenode is None: - Params.fatal("Unable to install the file `%s': not found in %s" % (filename, node)) - - file = filenode.abspath(env) - destfile = os.path.join(destpath, filenode.m_name) - else: - file = filename - alst = Utils.split_path(filename) - destfile = os.path.join(destpath, alst[-1]) - - if do_install(file, destfile, chmod=chmod): - installed_files.append(destfile) - return installed_files - -def install_as(var, destfile, srcfile, env=None, chmod=0644): - """returns True if the file was effectively installed, False otherwise""" - if not Params.g_install: return False - if not var: return False - - bld = Params.g_build - if not env: env = Params.g_build.env() - node = bld.path - - tgt = env[var] - if not tgt: tgt = var # absolute paths for example - - destdir = env.get_destdir() - if destdir: tgt = os.path.join(destdir, tgt.lstrip(os.sep)) - tgt = os.path.join(tgt, destfile.lstrip(os.sep)) - - dir, name = os.path.split(tgt) - check_dir(dir) - - # the source path - if not os.path.isabs(srcfile): - alst = Utils.split_path(srcfile) - filenode = node.find_resource_lst(alst) - src = filenode.abspath(env) - else: - src = srcfile - - return do_install(src, tgt, chmod=chmod) - -def symlink_as(var, src, dest, env=None): - if not Params.g_install: return - if not var: return - - bld = Params.g_build - if not env: env=Params.g_build.env() - node = bld.path - - tgt = env[var] - if not tgt: tgt = var - - destdir = env.get_destdir() - if destdir: tgt = os.path.join(destdir, tgt.lstrip(os.sep)) - tgt = os.path.join(tgt, dest.lstrip(os.sep)) - - dir, name = os.path.split(tgt) - check_dir(dir) - - if Params.g_commands['install']: - try: - if not os.path.islink(tgt) or os.readlink(tgt) != src: - print "* symlink %s (-> %s)" % (tgt, src) - os.symlink(src, tgt) - return 0 - except OSError: - return 1 - elif Params.g_commands['uninstall']: - try: - print "* removing %s" % (tgt) - os.remove(tgt) - return 0 - except OSError: - return 1 - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Configure.py a2jmidid-9/wafadmin/Configure.py --- a2jmidid-8~dfsg0/wafadmin/Configure.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Configure.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,373 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -""" -Configuration system - -A configuration instance is created when "waf configure" is called, it is used to: -* create data dictionaries (Environment instances) -* store the list of modules to import - -The old model (copied from Scons) was to store logic (mapping file extensions to functions) -along with the data. In Waf a way was found to separate that logic by adding an indirection -layer (storing the names in the Environment instances) - -In the new model, the logic is more object-oriented, and the user scripts provide the -logic. The data files (Environments) must contain configuration data only (flags, ..). 
- -Note: the c/c++ related code is in the module config_c -""" - -import os, types, imp, cPickle, sys, shlex, warnings -from Utils import md5 -import Action, Params, Environment, Runner, Build, Utils -from Params import fatal, warning -from Constants import * - -TEST_OK = True - -class ConfigurationError(Exception): - pass - -g_maxlen = 40 -"""initial length of configuration messages""" - -g_debug = 0 -"""enable/disable debug""" - -g_stdincpath = ['/usr/include/', '/usr/local/include/'] -"""standard include paths""" - -g_stdlibpath = ['/usr/lib/', '/usr/local/lib/', '/lib'] -"""standard library search paths""" - - -##################### -## Helper functions - -def find_file(filename, path_list): - """find a file in a list of paths - @param filename: name of the file to search for - @param path_list: list of directories to search - @return: the first occurrence filename or '' if filename could not be found -""" - if type(path_list) is types.StringType: - lst = path_list.split() - else: - lst = path_list - for directory in lst: - if os.path.exists( os.path.join(directory, filename) ): - return directory - return '' - -def find_file_ext(filename, path_list): - """find a file in a list of paths using fnmatch - @param filename: name of the file to search for - @param path_list: list of directories to search - @return: the first occurrence filename or '' if filename could not be found -""" - import fnmatch - if type(path_list) is types.StringType: - lst = path_list.split() - else: - lst = path_list - for directory in lst: - for path, subdirs, files in os.walk(directory): - for name in files: - if fnmatch.fnmatch(name, filename): - return path - return '' - -def find_program_impl(env, filename, path_list=[], var=None): - """find a program in folders path_lst, and sets env[var] - @param env: environment - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found -""" - try: path_list = path_list.split() - except AttributeError: pass - - if var: - if var in os.environ: env[var] = os.environ[var] - if env[var]: return env[var] - - if not path_list: path_list = os.environ['PATH'].split(os.pathsep) - - if Params.g_platform=='win32': - # TODO isnt fnmatch for this? 
- for y in [filename+x for x in '.exe,.com,.bat,.cmd'.split(',')]: - for directory in path_list: - x = os.path.join(directory, y) - if os.path.isfile(x): - if var: env[var] = x - return x - else: - for directory in path_list: - x = os.path.join(directory, filename) - if os.access(x, os.X_OK) and os.path.isfile(x): - if var: env[var] = x - return x - return '' - -class Configure(object): - log_file = 'config.log' - tests = {} - error_handlers = [] - def __init__(self, env=None, blddir='', srcdir=''): - - self.env = None - self.m_envname = '' - - self.m_blddir = blddir - self.m_srcdir = srcdir - - self.m_allenvs = {} - self.defines = {} - self.configheader = 'config.h' - self.cwd = os.getcwd() - - self.tools = [] # tools loaded in the configuration, and that will be loaded when building - - self.setenv('default') - - self.m_cache_table = {} - - self.lastprog = '' - - # load the cache - if Params.g_cache_global and not Params.g_options.nocache: - fic = os.path.join(Params.g_cache_global, Params.g_conf_name) - try: - file = open(fic, 'rb') - except (OSError, IOError): - pass - else: - try: - self.m_cache_table = cPickle.load(file) - finally: - file.close() - - self._quiet = 0 - - self.hash = 0 - self.files = [] - - def errormsg(self, msg): - Params.niceprint(msg, 'ERROR', 'Configuration') - - def fatal(self, msg): - raise ConfigurationError(msg) - - def check_tool(self, input, tooldir=None, funs=None): - "load a waf tool" - tools = Utils.to_list(input) - if tooldir: tooldir = Utils.to_list(tooldir) - for tool in tools: - try: - file,name,desc = imp.find_module(tool, tooldir) - except ImportError, ex: - raise ConfigurationError("no tool named '%s' found (%s)" % (tool, str(ex))) - module = imp.load_module(tool, file, name, desc) - - func = getattr(module, 'detect', None) - if func: - if type(func) is types.FunctionType: func(self) - else: self.eval_rules(funs or func) - - self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs}) - - def sub_config(self, dir): - "executes the configure function of a wscript module" - - current = self.cwd - - self.cwd = os.path.join(self.cwd, dir) - cur = os.path.join(self.cwd, WSCRIPT_FILE) - - try: - mod = Utils.load_module(cur) - except IOError: - fatal("the wscript file %s was not found." 
% cur) - - if not hasattr(mod, 'configure'): - fatal('the module %s has no configure function; make sure such a function is defined' % cur) - - ret = mod.configure(self) - if Params.g_autoconfig: - self.hash = Params.hash_function_with_globals(self.hash, mod.configure) - self.files.append(os.path.abspath(cur)) - self.cwd = current - return ret - - def store(self, file=''): - "save the config results into the cache file" - if not os.path.isdir(Params.g_cachedir): - os.makedirs(Params.g_cachedir) - - file = open(os.path.join(Params.g_cachedir, 'build.config.py'), 'w') - file.write('version = 0x%x\n' % HEXVERSION) - file.write('tools = %r\n' % self.tools) - file.close() - - if not self.m_allenvs: - fatal("nothing to store in Configure !") - for key in self.m_allenvs: - tmpenv = self.m_allenvs[key] - tmpenv.store(os.path.join(Params.g_cachedir, key+CACHE_SUFFIX)) - - def cleanup(self): - "when there is a cache directory store the config results (shutdown)" - if not Params.g_cache_global: return - - # not during the build - if not os.path.isdir(Params.g_cache_global): - os.makedirs(Params.g_cache_global) - - fic = os.path.join(Params.g_cache_global, Params.g_conf_name) - file = open(fic, 'wb') - try: - cPickle.dump(self.m_cache_table, file) - finally: - file.close() - - def set_env_name(self, name, env): - "add a new environment called name" - self.m_allenvs[name] = env - return env - - def retrieve(self, name, fromenv=None): - "retrieve an environment called name" - try: - env = self.m_allenvs[name] - except KeyError: - env = Environment.Environment() - self.m_allenvs[name] = env - else: - if fromenv: warning("The environment %s may have been configured already" % name) - return env - - def setenv(self, name): - "enable the environment called name" - self.env = self.retrieve(name) - self.envname = name - - def add_os_flags(self, var, dest=None): - if not dest: dest = var - # do not use 'get' to make certain the variable is not defined - try: self.env[dest] = os.environ[var] - except KeyError: pass - - def check_message(self, type, msg, state, option=''): - "print an checking message. This function is used by other checking functions" - sr = 'Checking for %s %s' % (type, msg) - global g_maxlen - g_maxlen = max(g_maxlen, len(sr)) - print "%s :" % sr.ljust(g_maxlen), - - p = Params.pprint - if state: p('GREEN', 'ok ' + option) - else: p('YELLOW', 'not found') - Runner.print_log(sr, '\n\n') - - def check_message_custom(self, type, msg, custom, option='', color='CYAN'): - """print an checking message. 
This function is used by other checking functions"""
-		sr = 'Checking for ' + type + ' ' + msg
-		global g_maxlen
-		g_maxlen = max(g_maxlen, len(sr))
-		print "%s :" % sr.ljust(g_maxlen),
-		Params.pprint(color, custom)
-		Runner.print_log(sr, '\n\n')
-
-	def hook(self, func):
-		"attach the function given as input as new method"
-		setattr(self.__class__, func.__name__, func)
-
-	def mute_logging(self):
-		"mutes the output temporarily"
-		self._quiet = Runner.g_quiet
-		Runner.g_quiet = 1
-		if not Runner.log_file:
-			Runner.log_file = open(os.path.join(self.m_blddir, Configure.log_file), 'a')
-
-	def restore_logging(self):
-		"see mute_logging"
-		Runner.g_quiet = self._quiet
-		if Runner.log_file:
-			Runner.log_file.close()
-			Runner.log_file = None
-
-	def find_program(self, program_name, path_list=[], var=None):
-		"wrapper provided for convenience"
-		ret = find_program_impl(self.env, program_name, path_list, var)
-		self.check_message('program', program_name, ret, ret)
-		return ret
-
-	def check_pkg(self, modname, destvar='', vnum='', pkgpath='', pkgbin='',
-		pkgvars=[], pkgdefs={}, mandatory=False):
-		"wrapper provided for convenience"
-		pkgconf = self.create_pkgconfig_configurator()
-
-		if not destvar: destvar = modname.upper()
-
-		pkgconf.uselib = destvar
-		pkgconf.name = modname
-		pkgconf.version = vnum
-		if pkgpath: pkgconf.pkgpath = pkgpath
-		pkgconf.binary = pkgbin
-		pkgconf.variables = pkgvars
-		pkgconf.defines = pkgdefs
-		pkgconf.mandatory = mandatory
-		return pkgconf.run()
-
-	def pkgconfig_fetch_variable(self,pkgname,variable,pkgpath='',pkgbin='',pkgversion=0,env=None):
-		if not env: env=self.env
-
-		if not pkgbin: pkgbin='pkg-config'
-		if pkgpath: pkgpath='PKG_CONFIG_PATH=$PKG_CONFIG_PATH:'+pkgpath
-		pkgcom = '%s %s' % (pkgpath, pkgbin)
-		if pkgversion:
-			ret = os.popen("%s --atleast-version=%s %s" % (pkgcom, pkgversion, pkgname)).close()
-			self.conf.check_message('package %s >= %s' % (pkgname, pkgversion), '', not ret)
-			if ret:
-				return '' # error
-		else:
-			ret = os.popen("%s %s" % (pkgcom, pkgname)).close()
-			self.check_message('package %s ' % (pkgname), '', not ret)
-			if ret:
-				return '' # error
-
-		return os.popen('%s --variable=%s %s' % (pkgcom, variable, pkgname)).read().strip()
-
-	def eval_rules(self, rules):
-		self.rules = Utils.to_list(rules)
-		for x in self.rules:
-			f = getattr(self, x)
-			try:
-				# TODO check pre/post conditions
-				f()
-			except Exception, e:
-				raise
-				if err_handler(x, e) == STOP:
-					break
-				else:
-					raise
-
-def conf(f):
-	"decorator: attach new configuration functions"
-	setattr(Configure, f.__name__, f)
-	return f
-
-def conftest(f):
-	"decorator: attach new configuration tests (registered as strings)"
-	setattr(Configure, f.__name__, f)
-	Configure.tests[f.__name__] = f
-	return f
-
-
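The two decorators that close Configure.py are the extension hook for the whole configuration system: @conf bolts a function onto the Configure class as a new method, and @conftest additionally registers it by name in the class-level tests dict so it can be looked up as a string. A minimal self-contained sketch of that pattern (the Registry and check_answer names are illustrative, not from waf):

class Registry(object):
    tests = {}

def conf(f):
    # attach f as a new method on Registry
    setattr(Registry, f.__name__, f)
    return f

def conftest(f):
    # attach f and also register it under its own name
    setattr(Registry, f.__name__, f)
    Registry.tests[f.__name__] = f
    return f

@conftest
def check_answer(self):
    return 42

r = Registry()
print(r.check_answer())                 # 42, called as a bound method
print(Registry.tests['check_answer'])   # looked up by string name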
diff -Nru a2jmidid-8~dfsg0/wafadmin/Constants.py a2jmidid-9/wafadmin/Constants.py
--- a2jmidid-8~dfsg0/wafadmin/Constants.py	2008-07-21 22:39:47.000000000 +0000
+++ a2jmidid-9/wafadmin/Constants.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,51 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-#! /usr/bin/env python
-# encoding: utf-8
-# Yinon dot me gmail 2008
-
-"""
-these constants are somewhat public, try not to mess them
-
-maintainer: the version number is updated from the top-level wscript file
-"""
-
-HEXVERSION = 0x10403
-ABI = 6
-
-CACHE_DIR = 'c4che'
-CACHE_SUFFIX = '.cache.py'
-DBFILE = '.wafpickle-%d' % ABI
-WSCRIPT_FILE = 'wscript'
-WSCRIPT_BUILD_FILE = 'wscript_build'
-COMMON_INCLUDES = 'COMMON_INCLUDES'
-
-SIG_NIL = 'iluvcuteoverload'
-
-VARIANT = '_VARIANT_'
-DEFAULT = 'default'
-
-SRCDIR = 'srcdir'
-BLDDIR = 'blddir'
-APPNAME = 'APPNAME'
-VERSION = 'VERSION'
-
-DEFINES = 'defines'
-UNDEFINED = '#undefined#variable#for#defines#'
-
-STOP = "stop"
-CONTINUE = "continue"
-
-# task scheduler options
-JOBCONTROL = "JOBCONTROL"
-MAXPARALLEL = "MAXPARALLEL"
-NORMAL = "NORMAL"
-
-# task state
-MISSING = 1
-CRASHED = 2
-SKIPPED = 8
-SUCCESS = 9
-
-
diff -Nru a2jmidid-8~dfsg0/wafadmin/DirWatch.py a2jmidid-9/wafadmin/DirWatch.py
--- a2jmidid-8~dfsg0/wafadmin/DirWatch.py	2008-07-21 22:39:47.000000000 +0000
+++ a2jmidid-9/wafadmin/DirWatch.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,190 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-#! /usr/bin/env python
-# encoding: utf-8
-# Matthias Jahn , 2006
-
-"DirWatch chooses a supported backend (fam, gamin or fallback) it is mainly a wrapper script without own methods beside this"
-
-from Params import debug
-import GaminAdaptor, FamAdaptor, FallbackAdaptor
-import os
-
-class WatchObject:
-	def __init__(self, idxName, namePath, isDir, callBackThis, handleEvents):
-		"""watch object to handle a watch
-		@param idxName: unique name for ref
-		@param dirList: path to watch
-		@param isDir: directory True or False
-		@param callBackThis: is called if something in dirs in dirlist has events (handleEvents) callBackThis(idxName, changedFilePath)
-		@param handleEvents: events to handle possible are 'changed', 'deleted', 'created', 'exist' suspendDirWatch after a handled change
-		"""
-		self.__adaptor = None
-		self.__fr = None
-		self.__idxName = idxName
-		self.__name = namePath
-		self.__isDir = isDir
-		self.__callBackThis = callBackThis
-		self.__handleEvents = handleEvents
-
-	def __del__(self):
-		self.unwatch()
-
-	def watch(self, adaptor):
-		"""start watching
-		@param adaptor: dirwatch adaptor for backend
-		"""
-		self.__adaptor = adaptor
-		if self.__fr != None:
-			self.unwatch()
-		if self.__isDir:
-			self.__fr = self.__adaptor.watch_directory(self.__name, self.__idxName)
-		else:
-			self.__fr = self.__adaptor.watch_file(self.__name, self.__idxName)
-
-	def unwatch(self):
-		"""stop watching"""
-		if self.__fr:
-			self.__fr = self.__adaptor.stop_watch(self.__name)
-
-	def get_events(self):
-		"""returns all events to care"""
-		return self.__handleEvents
-
-	def get_callback(self):
-		"""returns the callback methode"""
-		return self.__callBackThis
-
-	def get_fullpath(self, fileName):
-		"""returns the full path dir + filename"""
-		return os.path.join(self.__name, fileName)
-
-	def __str__(self):
-		if self.__isDir:
-			return 'DIR %s: ' % self.__name
-		else:
-			return 'FILE %s: ' % self.__name
-
-class DirectoryWatcher:
-	"""DirWatch chooses a supported backend (fam, gamin or fallback)
-	it is mainly a wrapper script without own methods beside this
-	"""
-	def __init__(self):
-		self.__adaptor = None
-		self.__watcher = {}
-		self.__loops = True
-		self.connect()
-
-	def __del__ (self):
-		self.disconnect()
-
-	def __raise_disconnected(self):
-		raise "Already disconnected"
-
-	def disconnect(self):
-		if self.__adaptor:
-			self.suspend_all_watch()
-
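DirectoryWatcher.connect(), which follows, selects the first workable notification backend in preference order (FAM, then Gamin, then the polling fallback), keying off a module-level support flag that each adaptor sets when its import and self-test succeed. A condensed sketch of that selection idiom, assuming hypothetical adaptor modules that expose the same support flag:

import importlib

def load_backend():
    """Return the first usable watcher backend class, best backend first."""
    candidates = (('FamAdaptor', 'FamAdaptor'),
                  ('GaminAdaptor', 'GaminAdaptor'),
                  ('FallbackAdaptor', 'FallbackAdaptor'))
    for module_name, class_name in candidates:
        try:
            module = importlib.import_module(module_name)
        except ImportError:
            continue  # backend (or its python binding) is absent
        if getattr(module, 'support', False):
            return getattr(module, class_name)
    raise RuntimeError('no directory-watching backend available')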
self.__adaptor = None - - def connect(self): - if self.__adaptor: - self.disconnect() - if FamAdaptor.support: - debug("using FamAdaptor") - self.__adaptor = FamAdaptor.FamAdaptor(self.__processDirEvents) - if self.__adaptor == None: - raise "something is strange" - elif GaminAdaptor.support: - debug("using GaminAdaptor") - self.__adaptor = GaminAdaptor.GaminAdaptor(self.__processDirEvents) - else: - debug("using FallbackAdaptor") - self.__adaptor = FallbackAdaptor.FallbackAdaptor(self.__processDirEvents) - - def add_watch(self, idxName, callBackThis, dirList, handleEvents = ['changed', 'deleted', 'created']): - """add dirList to watch. - @param idxName: unique name for ref - @param callBackThis: is called if something in dirs in dirlist has events (handleEvents) callBackThis(idxName, changedFilePath) - @param dirList: list of dirs to watch - @param handleEvents: events to handle possible are 'changed', 'deleted', 'created', 'exist' suspendDirWatch after a handled change - """ - self.remove_watch(idxName) - self.__watcher[idxName] = [] - for directory in dirList: - watchObject = WatchObject(idxName, os.path.abspath(directory), 1, callBackThis, handleEvents) - self.__watcher[idxName].append(watchObject) - self.resume_watch(idxName) - - def remove_watch(self, idxName): - """remove DirWatch with name idxName""" - if self.__watcher.has_key(idxName): - self.suspend_watch(idxName) - del self.__watcher[idxName] - - def remove_all_watch(self): - """remove all DirWatcher""" - self.__watcher = {} - - def suspend_watch(self, idxName): - """suspend DirWatch with name idxName. No dir/filechanges will be reacted until resume""" - if self.__watcher.has_key(idxName): - for watchObject in self.__watcher[idxName]: - watchObject.unwatch() - - def suspend_all_watch(self): - """suspend all DirWatcher ... they could be resumed with resume_all_watch""" - for idxName in self.__watcher.keys(): - self.suspend_watch(idxName) - - def resume_watch(self, idxName): - """resume a DirWatch that was supended with suspendDirWatch or suspendAllDirWatch""" - for watchObject in self.__watcher[idxName]: - watchObject.watch(self.__adaptor) - - def resume_all_watch(self): - """ resume all DirWatcher""" - for idxName in self.__watcher.keys(): - self.resume_watch(idxName) - - def __processDirEvents(self, pathName, event, idxName): - if event in self.__watcher[idxName][0].get_events(): - #self.disconnect() - self.suspend_watch(idxName) - __watcher = self.__watcher[idxName][0] - __watcher.get_callback()(idxName, __watcher.get_fullpath(pathName), event) - #self.connect() - self.resume_watch(idxName) - - def request_end_loop(self): - """sets a flag that stops the loop. 
it do not stop the loop directly!""" - self.__loops = False - - def loop(self): - """wait for dir events and start handling of them""" - try: - self.__loops = True - while self.__loops and self.__adaptor != None: - self.__adaptor.wait_for_event() - while self.__adaptor.event_pending(): - self.__adaptor.handle_events() - if not self.__loops: - break - except KeyboardInterrupt: - self.request_end_loop() - -if __name__ == "__main__": - class Test: - def __init__(self): - self.fam_test = DirectoryWatcher() - self.fam_test.add_watch("tmp Test", self.thisIsCalledBack, ["/tmp"]) - self.fam_test.loop() -# self.fam_test.loop() - - def thisIsCalledBack(self, idxName, pathName, event): - print "idxName=%s, Path=%s, Event=%s " % (idxName, pathName, event) - self.fam_test.resume_watch(idxName) - - Test() - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Environment.py a2jmidid-9/wafadmin/Environment.py --- a2jmidid-8~dfsg0/wafadmin/Environment.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Environment.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,190 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005 (ita) - -"""Environment representation - -There is one gotcha: getitem returns [] if the contents evals to False -This means env['foo'] = {}; print env['foo'] will print [] not {} -""" - -import os,types, copy, re -import Params -from Params import debug, warning -from Constants import * -re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M) - -g_cache_max = {} - -g_idx = 0 -class Environment(object): - """A safe-to-use dictionary, but do not attach functions to it please (break cPickle) - An environment instance can be stored into a file and loaded easily - """ - __slots__ = ("m_idx", "m_table", "m_parent") - def __init__(self): - global g_idx - self.m_idx = g_idx - g_idx += 1 - self.m_table={} - #self.m_parent = None <- set only if necessary - - if Params.g_commands['configure']: - # set the prefix once and for everybody on creation (configuration) - self.m_table['PREFIX'] = Params.g_options.prefix - - def __contains__(self, key): - if key in self.m_table: return True - try: return self.m_parent.__contains__(key) - except AttributeError: return False # m_parent may not exist - - def set_variant(self, name): - self.m_table[VARIANT] = name - - def variant(self): - env = self - while 1: - try: - return env.m_table[VARIANT] - except KeyError: - try: env = env.m_parent - except AttributeError: return DEFAULT - - def copy(self): - newenv = Environment() - if Params.g_commands['configure']: - if self['PREFIX']: del newenv.m_table['PREFIX'] - newenv.m_parent = self - return newenv - - def __str__(self): - return "environment table\n"+str(self.m_table) - - def __getitem__(self, key): - x = self.m_table.get(key, None) - if not x is None: return x - try: - u = self.m_parent - except AttributeError: - return [] - else: - return u[key] - - def __setitem__(self, key, value): - self.m_table[key] = value - - def get_flat(self, key): - s = self[key] - if not s: return '' - elif isinstance(s, list): return ' '.join(s) - else: return s - - def _get_list_value_for_modification(self, key): - """Gets a value that must be a list for further modification. The - list may be modified inplace and there is no need to - "self.m_table[var] = value" afterwards. 
- """ - try: - value = self.m_table[key] - except KeyError: - try: value = self.m_parent[key] - except AttributeError: value = [] - if isinstance(value, list): - value = copy.copy(value) - else: - value = [value] - self.m_table[key] = value - return value - else: - if not isinstance(value, list): - value = [value] - self.m_table[key] = value - return value - - def append_value(self, var, value): - current_value = self._get_list_value_for_modification(var) - - if isinstance(value, list): - current_value.extend(value) - else: - current_value.append(value) - - def prepend_value(self, var, value): - current_value = self._get_list_value_for_modification(var) - - if isinstance(value, list): - current_value = value + current_value - # a new list: update the dictionary entry - self.m_table[var] = current_value - else: - current_value.insert(0, value) - - # prepend unique would be ambiguous - def append_unique(self, var, value): - current_value = self._get_list_value_for_modification(var) - - if isinstance(value, list): - for value_item in value: - if value_item not in current_value: - current_value.append(value_item) - else: - if value not in current_value: - current_value.append(value) - - def store(self, filename): - "Write the variables into a file" - file = open(filename, 'w') - - # compute a merged table - table_list = [] - env = self - while 1: - table_list.insert(0, env.m_table) - try: env = env.m_parent - except AttributeError: break - merged_table = dict() - for table in table_list: - merged_table.update(table) - - keys = merged_table.keys() - keys.sort() - for k in keys: file.write('%s = %r\n' % (k, merged_table[k])) - file.close() - - def load(self, filename): - "Retrieve the variables from a file" - tbl = self.m_table - file = open(filename, 'r') - code = file.read() - file.close() - for m in re_imp.finditer(code): - g = m.group - tbl[g(2)] = eval(g(3)) - debug(self.m_table, 'env') - - def get_destdir(self): - "return the destdir, useful for installing" - if self.__getitem__('NOINSTALL'): return '' - return Params.g_options.destdir - - def sign_vars(self, vars_list): - " ['CXX', ..] -> [env['CXX'], ..]" - - # ccroot objects use the same environment for building the .o at once - # the same environment and the same variables are used - s = str([self.m_idx]+vars_list) - try: return g_cache_max[s] - except KeyError: pass - - lst = [self.get_flat(a) for a in vars_list] - ret = Params.h_list(lst) - if Params.g_zones: debug("%s %s" % (Params.view_sig(ret), str(lst)), 'envhash') - - # next time - g_cache_max[s] = ret - return ret - - diff -Nru a2jmidid-8~dfsg0/wafadmin/FallbackAdaptor.py a2jmidid-9/wafadmin/FallbackAdaptor.py --- a2jmidid-8~dfsg0/wafadmin/FallbackAdaptor.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/FallbackAdaptor.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,152 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Matthias Jahn 2006 - -""" -Fallback WatchMonitor should work anywhere ..;-) -this do not depends on gamin or fam instead it polls for changes -it works at least under linux ... 
windows or other *nix are untested -""" - -import os, time - -support = True - -class Fallback: - class Helper: - def __init__(self, callBack, userdata): - self.currentFiles = {} - self.oldFiles = {} - self.__firstRun = True - self.callBack = callBack - self.userdata = userdata - - def isFirstRun(self): - if self.__firstRun: - self.__firstRun = False - return True - else: - return False - - def __init__(self): - self.__dirs = {} - #event lists for changed and deleted - self.__changeLog = {} - - def __traversal(self, dirName): - """Traversal function for directories -Basic principle: all_files is a dictionary mapping paths to -modification times. We repeatedly crawl through the directory -tree rooted at 'path', doing a stat() on each file and comparing -the modification time. -""" - files = os.listdir(dirName) - firstRun = self.__dirs[dirName].isFirstRun() - - for filename in files: - path = os.path.join(dirName, filename) - try: - fileStat = os.stat(path) - except os.error: - # If a file has been deleted since the lsdir - # scanning the directory and now, we'll get an - # os.error here. Just ignore it -- we'll report - # the deletion on the next pass through the main loop. - continue - modifyTime = self.__dirs[dirName].oldFiles.get(path) - if modifyTime is not None: - # Record this file as having been seen - del self.__dirs[dirName].oldFiles[path] - # File's mtime has been changed since we last looked at it. - if fileStat.st_mtime > modifyTime: - self.__changeLog[path] = 'changed' - else: - if firstRun: - self.__changeLog[path] = 'exists' - else: - # No recorded modification time, so it must be - # a brand new file - self.__changeLog[path] = 'created' - # Record current mtime of file. - self.__dirs[dirName].currentFiles[path] = fileStat.st_mtime - - def watch_directory(self, namePath, callBack, idxName): - self.__dirs[namePath] = self.Helper(callBack, idxName) - return self - - def unwatch_directory(self, namePath): - if self.__dirs.get(namePath): - del self.__dirs[namePath] - - def event_pending(self): - for dirName in self.__dirs.keys(): - self.__dirs[dirName].oldFiles = self.__dirs[dirName].currentFiles.copy() - self.__dirs[dirName].currentFiles = {} - self.__traversal(dirName) - for deletedFile in self.__dirs[dirName].oldFiles.keys(): - self.__changeLog[deletedFile] = 'deleted' - del self.__dirs[dirName].oldFiles[deletedFile] - return len(self.__changeLog) - - def handle_events(self): - pathName = self.__changeLog.keys()[0] - event = self.__changeLog[pathName] - dirName = os.path.dirname(pathName) - self.__dirs[dirName].callBack(pathName, event, self.__dirs[dirName].userdata) - del self.__changeLog[pathName] - -class FallbackAdaptor: - def __init__(self, eventHandler): - self.__fallback = Fallback() - self.__eventHandler = eventHandler # callBack function - self.__watchHandler = {} # {name : famId} - - def __del__(self): - if self.__fallback: - for handle in self.__watchHandler.keys(): - self.stop_watch(handle) - self.__fallback = None - - def __check_fallback(self): - if self.__fallback == None: - raise "fallback not init" - - def watch_directory(self, name, idxName): - self.__check_fallback() - if self.__watchHandler.has_key(name): - raise "dir already watched" - # set famId - self.__watchHandler[name] = self.__fallback.watch_directory(name, self.__eventHandler, idxName) - return(self.__watchHandler[name]) - - def watch_file(self, name, idxName): - self.__check_fallback() - if self.__watchHandler.has_key(name): - raise "file already watched" - # set famId - self.__watchHandler[name] 
= self.__fallback.watch_directory(name, self.__eventHandler, idxName) - return self.__watchHandler[name] - - def stop_watch(self, name): - self.__check_fallback() - if self.__watchHandler.has_key(name): - self.__fallback.unwatch_directory(name) - del self.__watchHandler[name] - return None - - def wait_for_event(self): - self.__check_fallback() - time.sleep(1) - - def event_pending(self): - self.__check_fallback() - return self.__fallback.event_pending() - - def handle_events(self): - self.__check_fallback() - self.__fallback.handle_events() - - diff -Nru a2jmidid-8~dfsg0/wafadmin/FamAdaptor.py a2jmidid-9/wafadmin/FamAdaptor.py --- a2jmidid-8~dfsg0/wafadmin/FamAdaptor.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/FamAdaptor.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,83 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Matthias Jahn 2006 - -"""Fam WatchMonitor depends on python-fam ... it works with fam or gamin demon""" - -import select, errno -try: - import _fam -except ImportError: - support = False -else: - # check if fam runs and accepts connections - test = _fam.open() - test.close() - test = None - support = True - -class FamAdaptor: - """fam helper class for use with DirWatcher""" - def __init__(self, eventHandler): - """ creates the fam adaptor class - @param eventHandler: callback method for event handling""" - self.__fam = _fam.open() - self.__eventHandler = eventHandler # callBack function - self.__watchHandler = {} # {name : famId} - - def __del__(self): - if self.__fam: - for handle in self.__watchHandler.keys(): - self.stop_watch(handle) - self.__fam.close() - - def __check_fam(self): - if self.__fam == None: - raise "fam not init" - - def watch_directory(self, name, idxName): - self.__check_fam() - if self.__watchHandler.has_key(name): - raise "dir already watched" - # set famId - self.__watchHandler[name] = self.__fam.monitorDirectory(name, idxName) - return(self.__watchHandler[name]) - - def watch_file(self, name, idxName): - self.__check_fam() - if self.__watchHandler.has_key(name): - raise "file already watched" - # set famId - self.__watchHandler[name] = self.__fam.monitorFile(name, idxName) - return(self.__watchHandler[name]) - - def stop_watch(self, name): - self.__check_fam() - if self.__watchHandler.has_key(name): - self.__watchHandler[name].cancelMonitor() - del self.__watchHandler[name] - return None - - def wait_for_event(self): - self.__check_fam() - try: - select.select([self.__fam], [], []) - except select.error, er: - errnumber, strerr = er - if errnumber != errno.EINTR: - raise strerr - - def event_pending(self): - self.__check_fam() - return self.__fam.pending() - - def handle_events(self): - self.__check_fam() - fe = self.__fam.nextEvent() - #pathName, event, idxName - self.__eventHandler(fe.filename, fe.code2str(), fe.userData) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/GaminAdaptor.py a2jmidid-9/wafadmin/GaminAdaptor.py --- a2jmidid-8~dfsg0/wafadmin/GaminAdaptor.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/GaminAdaptor.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,107 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
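The FallbackAdaptor hunk above documents its strategy in the __traversal docstring: keep a path-to-mtime map per watched directory, re-stat everything on each pass, and classify paths as 'created', 'changed' or 'deleted' by diffing the new map against the previous snapshot. The essential loop, reduced to two helper functions (names illustrative, not the wafadmin ones):

import os

def snapshot(dirname):
    """Map each path directly under dirname to its current mtime."""
    result = {}
    for name in os.listdir(dirname):
        path = os.path.join(dirname, name)
        try:
            result[path] = os.stat(path).st_mtime
        except OSError:
            pass  # deleted between listdir() and stat(): report it next pass
    return result

def diff_snapshots(old, new):
    """Yield (path, event) pairs in the style of the fallback watcher."""
    for path, mtime in new.items():
        if path not in old:
            yield path, 'created'
        elif mtime > old[path]:
            yield path, 'changed'
    for path in old:
        if path not in new:
            yield path, 'deleted'

before = snapshot('.')
# ... some time later ...
after = snapshot('.')
for path, event in diff_snapshots(before, after):
    print(path, event)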
/usr/bin/env python -# encoding: utf-8 -# Oscar Blumberg 2006 (nael) -# Matthias Jahn - -"""Depends on python gamin and on gamin demon""" - -import select, errno -try: - import gamin -except ImportError: - support = False -else: - # check if gamin runs and accepts connections - test = gamin.WatchMonitor() - test.disconnect() - test = None - support = True - -class GaminAdaptor: - """gamin helper class for use with DirWatcher""" - def __init__(self, eventHandler): - """ creates the gamin wrapper - @param eventHandler: callback method for event handling""" - self.__gamin = gamin.WatchMonitor() - self.__eventHandler = eventHandler # callBack function - self.__watchHandler = {} # {name : famId} - - def __del__(self): - """clean remove""" - if self.__gamin: - for handle in self.__watchHandler.keys(): - self.stop_watch(handle) - self.__gamin.disconnect() - self.__gamin = None - - def __check_gamin(self): - """is gamin connected""" - if self.__gamin == None: - raise "gamin not init" - - def __code2str(self, event): - """convert event numbers to string""" - gaminCodes = { - 1:"changed", - 2:"deleted", - 3:"StartExecuting", - 4:"StopExecuting", - 5:"created", - 6:"moved", - 7:"acknowledge", - 8:"exists", - 9:"endExist" - } - try: - return gaminCodes[event] - except KeyError: - return "unknown" - - def __eventhandler_helper(self, pathName, event, idxName): - """local eventhandler helps to convert event numbers to string""" - self.__eventHandler(pathName, self.__code2str(event), idxName) - - def watch_directory(self, name, idxName): - self.__check_gamin() - if self.__watchHandler.has_key(name): - raise "dir already watched" - # set gaminId - self.__watchHandler[name] = self.__gamin.watch_directory(name, self.__eventhandler_helper, idxName) - return(self.__watchHandler[name]) - - def watch_file(self, name, idxName): - self.__check_gamin() - if self.__watchHandler.has_key(name): - raise "file already watched" - # set famId - self.__watchHandler[name] = self.__gamin.watch_directory(name, self.__eventhandler_helper, idxName) - return(self.__watchHandler[name]) - - def stop_watch(self, name): - self.__check_gamin() - if self.__watchHandler.has_key(name): - self.__gamin.stop_watch(name) - del self.__watchHandler[name] - return None - - def wait_for_event(self): - self.__check_gamin() - try: - select.select([self.__gamin.get_fd()], [], []) - except select.error, er: - errnumber, strerr = er - if errnumber != errno.EINTR: - raise strerr - - def event_pending(self): - self.__check_gamin() - return self.__gamin.event_pending() - - def handle_events(self): - self.__check_gamin() - self.__gamin.handle_events() - - diff -Nru a2jmidid-8~dfsg0/wafadmin/__init__.py a2jmidid-9/wafadmin/__init__.py --- a2jmidid-8~dfsg0/wafadmin/__init__.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/__init__.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005 (ita) - diff -Nru a2jmidid-8~dfsg0/wafadmin/Node.py a2jmidid-9/wafadmin/Node.py --- a2jmidid-8~dfsg0/wafadmin/Node.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Node.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,521 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005 (ita) - -""" -Node: filesystem structure, contains lists of nodes - -Each file/folder is represented by exactly one node - -we do not want to add another type attribute (memory) -rather, we will use the id to find out: -type = id & 3 -setting: new type = type + x - type & 3 - -IMPORTANT: -Some would-be class properties are stored in Build: nodes to depend on, signature, flags, .. -In fact, unused class members increase the .wafpickle file size sensibly with lots of objects -eg: the m_tstamp is used for every node, while the signature is computed only for build files - -the build is launched from the top of the build dir (for example, in _build_/) -""" - -import os, sys -import Params, Utils -from Params import debug, error, fatal - -UNDEFINED = 0 -DIR = 1 -FILE = 2 -BUILD = 3 - -type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"} - -class Node(object): - __slots__ = ("m_name", "m_parent", "id", "childs") - def __init__(self, name, parent, node_type = UNDEFINED): - self.m_name = name - self.m_parent = parent - - # assumption: one build object at a time - Params.g_build.id_nodes += 4 - self.id = Params.g_build.id_nodes + node_type - - if node_type == DIR: self.childs = {} - - # The checks below could be disabled for speed, if necessary - # TODO check for . .. / \ in name - - # Node name must contain only one level - if Utils.split_path(name)[0] != name: - fatal('name forbidden '+name) - - if parent and name in parent.childs: - fatal('node %s exists in the parent files %s already' % (name, str(parent))) - - if parent: parent.childs[name] = self - - def __str__(self): - if not self.m_parent: return '' - return "%s://%s" % (type_to_string[self.id & 3], self.abspath()) - - def __repr__(self): - return self.__str__() - - def __hash__(self): - "expensive, make certain it is not used" - raise - - def get_type(self): - return self.id & 3 - - def set_type(self, t): - self.id = self.id + t - self.id & 3 - - def dirs(self): - return [x for x in self.childs.values() if x.id & 3 == DIR] - - def get_dir(self, name, default=None): - node = self.childs.get(name, None) - if not node or node.id & 3 != DIR: return default - return node - - def files(self): - return [x for x in self.childs.values() if x.id & 3 == FILE] - - def get_file(self, name, default=None): - node = self.childs.get(name, None) - if not node or node.id & 3 != FILE: return default - return node - - def get_build(self, name, default=None): - node = self.childs.get(name, None) - if not node or node.id & 3 != BUILD: return default - return node - - # ===== BEGIN find methods ===== # - - def find_resource(self, path): - lst = Utils.split_path(path) - return self.find_resource_lst(lst) - - def find_resource_lst(self, lst): - "Find an existing input file: either a build node declared previously or a source node" - if not lst[:-1]: - parent = self - else: - parent = self.find_dir_lst(lst[:-1]) - if not parent: return None - Params.g_build.rescan(parent) - - name = lst[-1] - node = parent.childs.get(name, None) - if node: - tp = node.id & 3 - if tp == FILE or tp == BUILD: - return node - - tree = Params.g_build - if not name in tree.cache_dir_contents[parent.id]: - return None - - path = parent.abspath() + os.sep + name - try: - st = Params.h_file(path) - except IOError: - return None - - child = Node(name, parent, FILE) - tree.m_tstamp_variants[0][child.id] = st - return child - - def find_or_declare(self, path): - lst = Utils.split_path(path) - return 
self.find_or_declare_lst(lst) - - def find_or_declare_lst(self, lst): - "Used for declaring a build node representing a file being built" - if not lst[:-1]: - parent = self - else: - parent = self.find_dir_lst(lst[:-1]) - if not parent: return None - Params.g_build.rescan(parent) - - name = lst[-1] - node = parent.childs.get(name, None) - if node: - tp = node.id & 3 - if tp != BUILD: - fatal("find_or_declare returns a build node, not a source nor a directory"+str(lst)) - return node - node = Node(name, parent, BUILD) - return node - - def find_dir(self, path): - lst = Utils.split_path(path) - return self.find_dir_lst(lst) - - def find_dir_lst(self, lst): - "search a folder in the filesystem" - current = self - for name in lst: - Params.g_build.rescan(current) - prev = current - - if not current.m_parent and name == current.m_name: - continue - elif not name: - continue - elif name == '.': - continue - elif name == '..': - current = current.m_parent or current - else: - current = prev.childs.get(name, None) - if current is None: - dir_cont = Params.g_build.cache_dir_contents - if prev.id in dir_cont and name in dir_cont[prev.id]: - current = Node(name, prev, DIR) - else: - return None - return current - - # compatibility - find_build = find_or_declare - find_build_lst = find_or_declare_lst - find_source = find_resource - find_source_lst = find_resource_lst - - def ensure_dir_node_from_path(self, path): - return self.ensure_dir_node_from_path_lst(Utils.split_path(path)) - - def ensure_dir_node_from_path_lst(self, plst): - "used very rarely, force the construction of a branch of node instance for representing folders" - current = self - for name in plst: - if not name: - continue - elif name == '.': - continue - elif name == '..': - current = current.m_parent or current - else: - prev = current - current = prev.childs.get(name, None) - if current is None: - current = Node(name, prev, DIR) - return current - - def exclusive_build_node(self, path): - """ - create a hierarchy in the build dir (no source folders) for ill-behaving compilers - the node is not hashed, so you must do it manually - - after declaring such a node, find_dir and find_resource should work as expected - """ - lst = Utils.split_path(path) - name = lst[-1] - if len(lst) > 1: - parent = None - try: - parent = self.find_dir_lst(lst[:-1]) - except OSError: - pass - if not parent: - # exclusive build directory -> mark the parent as rescanned - # for find_dir and find_resource to work - parent = self.ensure_dir_node_from_path_lst(lst[:-1]) - Params.g_build.m_scanned_folders[parent.id] = 1 - else: - parent = self - - node = parent.childs.get(name, None) - if not node: - node = Node(name, parent, BUILD) - - return node - - ## ===== END find methods ===== ## - - - ## ===== BEGIN relpath-related methods ===== ## - - # same as pathlist3, but do not append './' at the beginning - def pathlist4(self, node): - #print "pathlist4 called" - if self == node: return [] - if self.m_parent == node: return [self.m_name] - return [self.m_name, os.sep] + self.m_parent.pathlist4(node) - - def relpath(self, parent): - "path relative to a direct parent, as string" - lst = [] - p = self - h1 = parent.height() - h2 = p.height() - while h2 > h1: - h2 -= 1 - lst.append(p.m_name) - p = p.m_parent - if lst: - lst.reverse() - ret = os.path.join(*lst) - else: - ret = '' - return ret - - # find a common ancestor for two nodes - for the shortest path in hierarchy - def find_ancestor(self, node): - dist = self.height() - node.height() - if dist < 0: return 
node.find_ancestor(self) - # now the real code - cand = self - while dist > 0: - cand = cand.m_parent - dist -= 1 - if cand == node: return cand - cursor = node - while cand.m_parent: - cand = cand.m_parent - cursor = cursor.m_parent - if cand == cursor: return cand - - # prints the amount of "../" between two nodes - def invrelpath(self, parent): - lst = [] - cand = self - while not cand == parent: - cand = cand.m_parent - lst += ['..', os.sep] - return lst - - # TODO: do this in a single function (this one uses invrelpath, find_ancestor and pathlist4) - # string representing a relative path between two nodes, we are at relative_to - def relpath_gen(self, going_to): - if self == going_to: return '.' - if going_to.m_parent == self: return '..' - - # up_path is '../../../' and down_path is 'dir/subdir/subdir/file' - ancestor = self.find_ancestor(going_to) - up_path = going_to.invrelpath(ancestor) - down_path = self.pathlist4(ancestor) - down_path.reverse() - return "".join(up_path + down_path) - - def nice_path(self, env=None): - "printed in the console, open files easily from the launch directory" - tree = Params.g_build - ln = tree.launch_node() - name = self.m_name - x = self.m_parent.get_file(name) - if x: return self.relative_path(ln) - else: return os.path.join(tree.m_bldnode.relative_path(ln), env.variant(), self.relative_path(tree.m_srcnode)) - - def relative_path(self, folder): - "relative path between a node and a directory node" - hh1 = h1 = self.height() - hh2 = h2 = folder.height() - p1 = self - p2 = folder - while h1 > h2: - p1 = p1.m_parent - h1 -= 1 - while h2 > h1: - p2 = p2.m_parent - h2 -= 1 - - # now we have two nodes of the same height - ancestor = None - if p1.m_name == p2.m_name: - ancestor = p1 - while p1.m_parent: - p1 = p1.m_parent - p2 = p2.m_parent - if p1.m_name != p2.m_name: - ancestor = None - elif not ancestor: - ancestor = p1 - - anh = ancestor.height() - n1 = hh1-anh - n2 = hh2-anh - - lst = [] - tmp = self - while n1: - n1 -= 1 - lst.append(tmp.m_name) - tmp = tmp.m_parent - - lst.reverse() - up_path = os.sep.join(lst) - down_path = (".."+os.sep) * n2 - - return "".join(down_path + up_path) - - ## ===== END relpath-related methods ===== ## - - def debug(self): - print "========= debug node =============" - print "dirs are ", self.dirs() - print "files are", self.files() - print "======= end debug node ===========" - - def is_child_of(self, node): - "does this node belong to the subtree node" - p = self - diff = self.height() - node.height() - while diff > 0: - diff -= 1 - p = p.m_parent - return p.id == node.id - - def variant(self, env): - "variant, or output directory for this node, a source has for variant 0" - if not env: return 0 - elif self.id & 3 == FILE: return 0 - else: return env.variant() - - def size_subtree(self): - "for debugging, returns the amount of subnodes" - return sum([i.size_subtree() for i in self.dirs()]) + len(self.files()) - - def height(self): - "amount of parents" - # README a cache can be added here if necessary - d = self - val = 0 - while d.m_parent: - d = d.m_parent - val += 1 - return val - - # helpers for building things - - def abspath(self, env=None): - """ - Returns the absolute file path for this node. If this - node is a build node, the absolute path will be a - build path, else it will be a source path. - - @param env: Environment object. This is used to - determine the variant we are interested in. 
'env' can - be None if we are sure to be working with a source - Node, but when in doubt a real Environment object - should be provided. - """ - ## absolute path - hot zone, so do not touch - - if not self.m_name: - return '/' - - variant = self.variant(env) - ret = Params.g_build.m_abspath_cache[variant].get(self.id, None) - if ret: return ret - - if not variant: - cur = self - lst = [] - while cur: - lst.append(cur.m_name) - cur = cur.m_parent - lst.reverse() - # the real hot zone is the os path join - val = os.sep.join(lst) - else: - val = os.sep.join((Params.g_build.m_bldnode.abspath(), env.variant(), self.relpath(Params.g_build.m_srcnode))) - Params.g_build.m_abspath_cache[variant][self.id] = val - return val - - def change_ext(self, ext): - "node of the same path, but with a different extension" - name = self.m_name - k = name.rfind('.') - if k >= 0: - name = name[:k] + ext - else: - name = name + ext - - node = self.m_parent.childs.get(name, None) - if not node: - node = Node(name, self.m_parent, BUILD) - return node - - def bld_dir(self, env): - "build path without the file name" - return self.m_parent.bldpath(env) - - def bldbase(self, env): - "build path without the extension: src/dir/foo(.cpp)" - s = self.m_name - s = s[:s.rfind('.')] - return os.path.join(self.bld_dir(env), s) - - def bldpath(self, env=None): - "path seen from the build dir default/src/foo.cpp" - x = self.m_parent.get_file(self.m_name) - - if x: return self.relpath_gen(Params.g_build.m_bldnode) - if self.relpath(Params.g_build.m_srcnode) is not '': - return os.path.join(env.variant(), self.relpath(Params.g_build.m_srcnode)) - return env.variant() - - def srcpath(self, env): - "path in the srcdir from the build dir ../src/foo.cpp" - x = self.m_parent.get_build(self.m_name) - if x: return self.bldpath(env) - return self.relpath_gen(Params.g_build.m_bldnode) - -# win32 fixes follow -if sys.platform == "win32": - def find_dir_lst_win32(self, lst): - current = self - for name in lst: - Params.g_build.rescan(current) - prev = current - - if not current.m_parent and name == current.m_name: - continue - if not name: - continue - elif name == '.': - continue - elif name == '..': - current = current.m_parent or current - else: - current = prev.childs.get(name, None) - if current is None: - if (name in Params.g_build.cache_dir_contents[prev.id] - or (not prev.m_parent and name[1] == ":")): - current = Node(name, prev, DIR) - else: - return None - return current - Node.find_dir_lst = find_dir_lst_win32 - - def abspath_win32(self, env=None): - variant = self.variant(env) - ret = Params.g_build.m_abspath_cache[variant].get(self.id, None) - if ret: return ret - - if not variant: - cur = self - lst = [] - while cur: - lst.append(cur.m_name) - cur = cur.m_parent - lst.reverse() - val = os.sep.join(lst) - else: - val = os.sep.join((Params.g_build.m_bldnode.abspath(), env.variant(), self.relpath(Params.g_build.m_srcnode))) - if val.startswith("\\"): val = val[1:] - if val.startswith("\\"): val = val[1:] - Params.g_build.m_abspath_cache[variant][self.id] = val - return val - Node.abspath = abspath_win32 - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Object.py a2jmidid-9/wafadmin/Object.py --- a2jmidid-8~dfsg0/wafadmin/Object.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Object.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -#! 
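Node.py's header comment above spells out its main space optimization: rather than storing a type attribute on every node, ids are allocated in steps of four and the node type (UNDEFINED/DIR/FILE/BUILD) lives in the two low bits, recovered with id & 3; set_type is meant to rewrite just those low bits. A self-contained sketch of that encoding, with the bit update parenthesized explicitly:

UNDEFINED, DIR, FILE, BUILD = 0, 1, 2, 3

_next_id = 0

def alloc_id(node_type):
    """Allocate an id whose two low bits encode the node type."""
    global _next_id
    _next_id += 4               # keep the low two bits free
    return _next_id + node_type

def get_type(node_id):
    return node_id & 3

def set_type(node_id, new_type):
    # replace the low two bits, keep the rest of the id intact
    return node_id + new_type - (node_id & 3)

n = alloc_id(FILE)
assert get_type(n) == FILE
assert get_type(set_type(n, BUILD)) == BUILD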
/usr/bin/env python -# encoding: utf-8 - - -import warnings -warnings.warn("The WAF module 'Object' has been renamed to 'TaskGen'", DeprecationWarning, stacklevel=2) -del warnings - -from TaskGen import * - diff -Nru a2jmidid-8~dfsg0/wafadmin/Options.py a2jmidid-9/wafadmin/Options.py --- a2jmidid-8~dfsg0/wafadmin/Options.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Options.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,217 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Scott Newton, 2005 (scottn) -# Thomas Nagy, 2006 (ita) - -"Custom command-line options" - -import os, sys, imp, types, tempfile -from optparse import OptionParser -import Params, Utils -from Params import debug, fatal, warning, error -from Constants import * - -# Such a command-line should work: JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure -default_prefix = os.environ.get('PREFIX') -if not default_prefix: - if sys.platform == 'win32': default_prefix = tempfile.gettempdir() - else: default_prefix = '/usr/local/' - -default_jobs = os.environ.get('JOBS', 1) -default_destdir = os.environ.get('DESTDIR', '') - -def create_parser(): - debug("create_parser is called", 'options') - - parser = OptionParser(usage = """waf [options] [commands ...] - -* Main commands: configure build install clean dist distclean uninstall distcheck -* Example: ./waf build -j4""", version = 'waf %s' % Params.g_version) - - parser.formatter.width = Utils.get_term_cols() - p = parser.add_option - - p('-j', '--jobs', - type = 'int', - default = default_jobs, - help = "amount of parallel jobs [Default: %s]" % default_jobs, - dest = 'jobs') - - p('', '--daemon', - action = 'store_true', - default = False, - help = 'run as a daemon [Default: False]', - dest = 'daemon') - - p('-f', '--force', - action = 'store_true', - default = False, - help = 'force file installation', - dest = 'force') - - p('-k', '--keep', - action = 'store_true', - default = False, - help = 'keep running happily on independant task groups', - dest = 'keep') - - p('-p', '--progress', - action = 'count', - default = 0, - help = '-p: progress bar; -pp: ide output', - dest = 'progress_bar') - - p('-v', '--verbose', - action = 'count', - default = 0, - help = 'verbosity level -v -vv or -vvv [Default: 0]', - dest = 'verbose') - - p('--destdir', - help = "installation root [Default: '%s']" % default_destdir, - default = default_destdir, - dest = 'destdir') - - p('--nocache', - action = 'store_true', - default = False, - help = 'compile everything, even if WAFCACHE is set', - dest = 'nocache') - - if 'configure' in sys.argv: - p('-b', '--blddir', - action = 'store', - default = '', - help = 'build dir for the project (configuration)', - dest = 'blddir') - - p('-s', '--srcdir', - action = 'store', - default = '', - help = 'src dir for the project (configuration)', - dest = 'srcdir') - - p('--prefix', - help = "installation prefix (configuration only) [Default: '%s']" % default_prefix, - default = default_prefix, - dest = 'prefix') - - p('--zones', - action = 'store', - default = '', - help = 'debugging zones (task_gen, deps, tasks, etc)', - dest = 'zones') - - p('--targets', - action = 'store', - default = '', - help = 'compile the targets given only [targets in CSV format, e.g. 
"target1,target2"]', - dest = 'compile_targets') - - return parser - -def parse_args_impl(parser, _args=None): - (Params.g_options, args) = parser.parse_args(args=_args) - opts = Params.g_options - #print Params.g_options, " ", args - - # By default, 'waf' is equivalent to 'waf build' - lst='dist configure clean distclean build install uninstall check distcheck'.split() - Params.g_commands = {} - for var in lst: Params.g_commands[var] = 0 - if len(args) == 0: Params.g_commands['build'] = 1 - - # Parse the command arguments - for arg in args: - arg = arg.strip() - if arg in lst: - Params.g_commands[arg]=True - else: - print 'Error: Invalid command specified ',arg - parser.print_help() - sys.exit(1) - if Params.g_commands['check']: - Params.g_commands['build'] = True - - if Params.g_commands['install'] or Params.g_commands['uninstall']: - Params.g_install = 1 - - # TODO -k => -j0 - if opts.keep: opts.jobs = 1 - - Params.g_verbose = opts.verbose - if opts.zones: - Params.g_zones = opts.zones.split(',') - if not Params.g_verbose: Params.g_verbose = 1 - if Params.g_verbose > 1: Params.set_trace(1,1,1) - else: Params.set_trace(0,0,1) - -class Handler(object): - "loads wscript modules in folders for adding options" - def __init__(self): - self.parser = create_parser() - self.cwd = os.getcwd() - global g_parser - g_parser = self - - def add_option(self, *kw, **kwargs): - self.parser.add_option(*kw, **kwargs) - - def add_option_group(self, *args, **kwargs): - return self.parser.add_option_group(*args, **kwargs) - - def get_option_group(self, opt_str): - return self.parser.get_option_group(opt_str) - - def sub_options(self, dir, option_group=None): - """set options defined by wscripts: - - run by Scripting to set the options defined by main wscript. - - run by wscripts to set options in sub directories.""" - try: - current = self.cwd - - self.cwd = os.path.join(self.cwd, dir) - cur = os.path.join(self.cwd, WSCRIPT_FILE) - - mod = Utils.load_module(cur) - try: - fun = mod.set_options - except AttributeError: - msg = "no set_options function was found in wscript\n[%s]:\n * make sure such a function is defined \n * run configure from the root of the project" - fatal(msg % self.cwd) - else: - fun(option_group or self) - - finally: - self.cwd = current - - def tool_options(self, tool, tooldir=None, option_group=None): - Utils.python_24_guard() - if type(tool) is types.ListType: - for i in tool: self.tool_options(i, tooldir, option_group) - return - - if not tooldir: tooldir = Params.g_tooldir - tooldir = Utils.to_list(tooldir) - try: - file,name,desc = imp.find_module(tool, tooldir) - except ImportError: - fatal("no tool named '%s' found" % tool) - module = imp.load_module(tool,file,name,desc) - try: - fun = module.set_options - except AttributeError: - warning("tool %s has no function set_options" % tool) - else: - fun(option_group or self) - - def parse_args(self, args=None): - parse_args_impl(self.parser, args) - -g_parser = None -"Last Handler instance in use" - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Params.py a2jmidid-9/wafadmin/Params.py --- a2jmidid-8~dfsg0/wafadmin/Params.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Params.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,275 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -"Main parameters" - -import os, sys, types, inspect, base64, time -from Utils import md5 -import Constants, Utils - -# updated from the top-level wscript -g_version="1.4.3" - -g_rootname = '' -g_progress = '\x1b[K%s%s%s\r' -if sys.platform=='win32': - # get the first two letters (c:) - g_rootname = os.getcwd()[:2] - g_progress = '\x1b[A\x1b[K%s%s%s\r' - -g_autoconfig = 0 -"reconfigure the project automatically" - -# =================================== # -# Constants set on runtime - -g_cwd_launch = None -"directory from which waf was called" - -g_tooldir='' -"Tools directory (used in particular by Environment.py)" - -g_options = None -"Parsed command-line arguments in the options module" - -g_commands = {} -"build, configure, .." - -g_verbose = 0 -"-v: warnings, -vv: developer info, -vvv: all info" - -g_build = None -"only one build object is active at a time" - -g_platform = sys.platform -"current platform" - -g_cache_global = '' -"config cache directory" - -g_conf_name = 'conf-runs-%s-%d.pickle' % (sys.platform, Constants.ABI) - -g_install = 0 -"true if install or uninstall is set" - -try: g_cache_global = os.path.abspath(os.environ['WAFCACHE']) -except KeyError: pass - -try: g_lockfile = os.environ['WAFLOCK'] -except KeyError: g_lockfile = '.lock-wscript' - -# =================================== # -# HELPERS - -#g_col_names = ['BOLD', 'RED', 'REDP', 'GREEN', 'YELLOW', 'BLUE', 'CYAN', 'NORMAL'] -#"color names" - -g_col_scheme = [1, 91, 33, 92, 93, 94, 96, 0] - -g_colors = { -'BOLD' :'\033[01;1m', -'RED' :'\033[01;91m', -'REDP' :'\033[01;33m', -'GREEN' :'\033[01;92m', -'YELLOW':'\033[00;33m', -'PINK' :'\033[00;35m', -'BLUE' :'\033[01;34m', -'CYAN' :'\033[01;36m', -'NORMAL':'\033[0m' -} -"colors used for printing messages" - -g_cursor_on ='\x1b[?25h' -g_cursor_off='\x1b[?25l' - -def reset_colors(): - global g_colors - for k in g_colors.keys(): - g_colors[k]='' - g_cursor_on='' - g_cursor_off='' - -if (sys.platform=='win32') or ('NOCOLOR' in os.environ) \ - or (os.environ.get('TERM', 'dumb') in ['dumb', 'emacs']) \ - or (not sys.stdout.isatty()): - reset_colors() - -def pprint(col, str, label=''): - try: mycol=g_colors[col] - except KeyError: mycol='' - print "%s%s%s %s" % (mycol, str, g_colors['NORMAL'], label) - -g_levels={ -'Action' : 'GREEN', -'Build' : 'CYAN', -'KDE' : 'REDP', -'Node' : 'GREEN', -'TaskGen': 'GREEN', -'Runner' : 'REDP', -'Task' : 'GREEN', -'Test' : 'GREEN', -} - -g_zones = [] - -def set_trace(a, b, c): - Utils.g_trace=a - Utils.g_debug=b - Utils.g_error=c - -def get_trace(): - return (Utils.g_trace, Utils.g_debug, Utils.g_error) - -def niceprint(msg, type='', module=''): - #if not module: - # print '%s: %s'% (type, msg) - # return - def print_pat(color): - print '%s %s<%s>%s %s' % (type, g_colors[color], module, g_colors['NORMAL'], msg) - - if type == 'ERROR' or type == 'WARNING': - print_pat('RED') - return - if type=='DEBUG': - print_pat('CYAN') - return - if module in g_levels: - print_pat(g_levels[module]) - return - print 'TRACE <%s> %s'% (module, msg) - -def __get_module(): - try: return inspect.stack()[2][0].f_globals['__name__'] - except (IndexError, KeyError): return "unknown" - -def debug(msg, zone=None): - global g_zones, g_verbose - if g_zones: - if (not zone in g_zones) and (not '*' in g_zones): - return - elif not g_verbose>2: - return - module = __get_module() - - msg = time.strftime('%%X %s' % msg) - niceprint(msg, 'DEBUG', module) - -def warning(msg, zone=0): - module = 
__get_module() - niceprint(msg, 'WARNING', module) - -def error(msg): - if not Utils.g_error: return - module = __get_module() - niceprint(msg, 'ERROR', module) - -def fatal(msg, ret=1): - module = __get_module() - if g_verbose > 0: - pprint('RED', '%s \n (error raised in module %s)' % (msg, module)) - else: - pprint('RED', '%s' % msg) - if g_verbose > 1: - import traceback - traceback.print_stack() - sys.exit(ret) - -def view_sig(s): - "used for displaying signatures" - if type(s) is types.StringType: - n = base64.encodestring(s) - return n[:-2] - else: - return str(s) - -def hash_sig(o1, o2): - "hash two signatures" - m = md5() - m.update(o1) - m.update(o2) - return m.digest() - -def h_file(filename): - f = file(filename,'rb') - m = md5() - readBytes = 100000 - while (readBytes): - readString = f.read(readBytes) - m.update(readString) - readBytes = len(readString) - f.close() - return m.digest() - -try: - from fnv import new - def h_file(filename): - m = md5() - try: - m.hfile(filename) - x = m.digest() - if x is None: raise OSError, "not a file" - return x - except SystemError: - raise OSError, "not a file"+filename -except ImportError: - pass - -# Another possibility, faster (projects with more than 15000 files) but less accurate (cache) -# based on the path, md5 hashing can be used for some files and timestamp for others -#def h_file(filename): -# st = os.stat(filename) -# import stat -# if stat.S_ISDIR(st): raise IOError, 'not a file' -# m = md5() -# m.update(st.st_mtime) -# m.update(st.st_size) -# return m.digest() - -def h_string(str): - m = md5() - m.update(str) - return m.digest() - -def h_list(lst): - m = md5() - m.update(str(lst)) - return m.digest() - -_hash_blacklist_types = ( - types.BuiltinFunctionType, - types.ModuleType, - types.FunctionType, - types.ClassType, - types.TypeType, - types.NoneType, - ) - -def hash_function_with_globals(prevhash, func): - """ - hash a function (object) and the global vars needed from outside - ignore unhashable global variables (lists) - - prevhash -- previous hash value to be combined with this one; - if there is no previous value, zero should be used here - - func -- a Python function object. - """ - assert type(func) is types.FunctionType - for name, value in func.func_globals.iteritems(): - if type(value) in _hash_blacklist_types: - continue - if isinstance(value, type): - continue - try: - prevhash = hash( (prevhash, name, value) ) - except TypeError: # raised for unhashable elements - pass - #else: - # print "hashed: ", name, " => ", value, " => ", hash(value) - return hash( (prevhash, inspect.getsource(func)) ) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/pproc.py a2jmidid-9/wafadmin/pproc.py --- a2jmidid-8~dfsg0/wafadmin/pproc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/pproc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,624 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -# borrowed from python 2.5.2c1 -# Copyright (c) 2003-2005 by Peter Astrand -# Licensed to PSF under a Contributor Agreement. 
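pproc.py, which starts here, is a vendored copy of the subprocess module from Python 2.5.2c1, carried so waf could offer Popen/PIPE/call/check_call uniformly on interpreters that predate a stable subprocess. On any modern Python the standard library covers the same surface directly, e.g.:

import subprocess, sys

# pproc.call(...) equivalent: run a command and return its exit status
status = subprocess.call([sys.executable, '--version'])

# pproc.Popen(...).communicate() equivalent: capture output through a pipe
proc = subprocess.Popen([sys.executable, '--version'],
                        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = proc.communicate()
print(status, out.decode().strip())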
- -import sys -mswindows = (sys.platform == "win32") - -import os -import types -import traceback -import gc - -class CalledProcessError(Exception): - def __init__(self, returncode, cmd): - self.returncode = returncode - self.cmd = cmd - def __str__(self): - return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) - -if mswindows: - import threading - import msvcrt - if 0: - import pywintypes - from win32api import GetStdHandle, STD_INPUT_HANDLE, \ - STD_OUTPUT_HANDLE, STD_ERROR_HANDLE - from win32api import GetCurrentProcess, DuplicateHandle, \ - GetModuleFileName, GetVersion - from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE - from win32pipe import CreatePipe - from win32process import CreateProcess, STARTUPINFO, \ - GetExitCodeProcess, STARTF_USESTDHANDLES, \ - STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE - from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0 - else: - from _subprocess import * - class STARTUPINFO: - dwFlags = 0 - hStdInput = None - hStdOutput = None - hStdError = None - wShowWindow = 0 - class pywintypes: - error = IOError -else: - import select - import errno - import fcntl - import pickle - -__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"] - -try: - MAXFD = os.sysconf("SC_OPEN_MAX") -except: - MAXFD = 256 - -try: - False -except NameError: - False = 0 - True = 1 - -_active = [] - -def _cleanup(): - for inst in _active[:]: - if inst.poll(_deadstate=sys.maxint) >= 0: - try: - _active.remove(inst) - except ValueError: - pass - -PIPE = -1 -STDOUT = -2 - - -def call(*popenargs, **kwargs): - return Popen(*popenargs, **kwargs).wait() - -def check_call(*popenargs, **kwargs): - retcode = call(*popenargs, **kwargs) - cmd = kwargs.get("args") - if cmd is None: - cmd = popenargs[0] - if retcode: - raise CalledProcessError(retcode, cmd) - return retcode - - -def list2cmdline(seq): - result = [] - needquote = False - for arg in seq: - bs_buf = [] - - if result: - result.append(' ') - - needquote = (" " in arg) or ("\t" in arg) or arg == "" - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - bs_buf.append(c) - elif c == '"': - result.append('\\' * len(bs_buf)*2) - bs_buf = [] - result.append('\\"') - else: - if bs_buf: - result.extend(bs_buf) - bs_buf = [] - result.append(c) - - if bs_buf: - result.extend(bs_buf) - - if needquote: - result.extend(bs_buf) - result.append('"') - - return ''.join(result) - -class Popen(object): - def __init__(self, args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - _cleanup() - - self._child_created = False - if not isinstance(bufsize, (int, long)): - raise TypeError("bufsize must be an integer") - - if mswindows: - if preexec_fn is not None: - raise ValueError("preexec_fn is not supported on Windows platforms") - if close_fds: - raise ValueError("close_fds is not supported on Windows platforms") - else: - if startupinfo is not None: - raise ValueError("startupinfo is only supported on Windows platforms") - if creationflags != 0: - raise ValueError("creationflags is only supported on Windows platforms") - - self.stdin = None - self.stdout = None - self.stderr = None - self.pid = None - self.returncode = None - self.universal_newlines = universal_newlines - - (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) = self._get_handles(stdin, stdout, stderr) - - self._execute_child(args, 
executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - if mswindows: - if stdin is None and p2cwrite is not None: - os.close(p2cwrite) - p2cwrite = None - if stdout is None and c2pread is not None: - os.close(c2pread) - c2pread = None - if stderr is None and errread is not None: - os.close(errread) - errread = None - - if p2cwrite: - self.stdin = os.fdopen(p2cwrite, 'wb', bufsize) - if c2pread: - if universal_newlines: - self.stdout = os.fdopen(c2pread, 'rU', bufsize) - else: - self.stdout = os.fdopen(c2pread, 'rb', bufsize) - if errread: - if universal_newlines: - self.stderr = os.fdopen(errread, 'rU', bufsize) - else: - self.stderr = os.fdopen(errread, 'rb', bufsize) - - - def _translate_newlines(self, data): - data = data.replace("\r\n", "\n") - data = data.replace("\r", "\n") - return data - - - def __del__(self, sys=sys): - if not self._child_created: - return - self.poll(_deadstate=sys.maxint) - if self.returncode is None and _active is not None: - _active.append(self) - - - def communicate(self, input=None): - if [self.stdin, self.stdout, self.stderr].count(None) >= 2: - stdout = None - stderr = None - if self.stdin: - if input: - self.stdin.write(input) - self.stdin.close() - elif self.stdout: - stdout = self.stdout.read() - elif self.stderr: - stderr = self.stderr.read() - self.wait() - return (stdout, stderr) - - return self._communicate(input) - - - if mswindows: - def _get_handles(self, stdin, stdout, stderr): - if stdin is None and stdout is None and stderr is None: - return (None, None, None, None, None, None) - - p2cread, p2cwrite = None, None - c2pread, c2pwrite = None, None - errread, errwrite = None, None - - if stdin is None: - p2cread = GetStdHandle(STD_INPUT_HANDLE) - if p2cread is not None: - pass - elif stdin is None or stdin == PIPE: - p2cread, p2cwrite = CreatePipe(None, 0) - p2cwrite = p2cwrite.Detach() - p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0) - elif isinstance(stdin, int): - p2cread = msvcrt.get_osfhandle(stdin) - else: - p2cread = msvcrt.get_osfhandle(stdin.fileno()) - p2cread = self._make_inheritable(p2cread) - - if stdout is None: - c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE) - if c2pwrite is not None: - pass - elif stdout is None or stdout == PIPE: - c2pread, c2pwrite = CreatePipe(None, 0) - c2pread = c2pread.Detach() - c2pread = msvcrt.open_osfhandle(c2pread, 0) - elif isinstance(stdout, int): - c2pwrite = msvcrt.get_osfhandle(stdout) - else: - c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) - c2pwrite = self._make_inheritable(c2pwrite) - - if stderr is None: - errwrite = GetStdHandle(STD_ERROR_HANDLE) - if errwrite is not None: - pass - elif stderr is None or stderr == PIPE: - errread, errwrite = CreatePipe(None, 0) - errread = errread.Detach() - errread = msvcrt.open_osfhandle(errread, 0) - elif stderr == STDOUT: - errwrite = c2pwrite - elif isinstance(stderr, int): - errwrite = msvcrt.get_osfhandle(stderr) - else: - errwrite = msvcrt.get_osfhandle(stderr.fileno()) - errwrite = self._make_inheritable(errwrite) - - return (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - def _make_inheritable(self, handle): - return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS) - - def _find_w9xpopen(self): - w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe") - if not os.path.exists(w9xpopen): - w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), 
"w9xpopen.exe") - if not os.path.exists(w9xpopen): - raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.") - return w9xpopen - - def _execute_child(self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite): - - if not isinstance(args, types.StringTypes): - args = list2cmdline(args) - - if startupinfo is None: - startupinfo = STARTUPINFO() - if None not in (p2cread, c2pwrite, errwrite): - startupinfo.dwFlags |= STARTF_USESTDHANDLES - startupinfo.hStdInput = p2cread - startupinfo.hStdOutput = c2pwrite - startupinfo.hStdError = errwrite - - if shell: - startupinfo.dwFlags |= STARTF_USESHOWWINDOW - startupinfo.wShowWindow = SW_HIDE - comspec = os.environ.get("COMSPEC", "cmd.exe") - args = comspec + " /c " + args - if (GetVersion() >= 0x80000000L or - os.path.basename(comspec).lower() == "command.com"): - w9xpopen = self._find_w9xpopen() - args = '"%s" %s' % (w9xpopen, args) - creationflags |= CREATE_NEW_CONSOLE - - try: - hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo) - except pywintypes.error, e: - raise WindowsError(*e.args) - - self._child_created = True - self._handle = hp - self.pid = pid - ht.Close() - - if p2cread is not None: - p2cread.Close() - if c2pwrite is not None: - c2pwrite.Close() - if errwrite is not None: - errwrite.Close() - - - def poll(self, _deadstate=None): - if self.returncode is None: - if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0: - self.returncode = GetExitCodeProcess(self._handle) - return self.returncode - - - def wait(self): - if self.returncode is None: - obj = WaitForSingleObject(self._handle, INFINITE) - self.returncode = GetExitCodeProcess(self._handle) - return self.returncode - - def _readerthread(self, fh, buffer): - buffer.append(fh.read()) - - def _communicate(self, input): - stdout = None - stderr = None - - if self.stdout: - stdout = [] - stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout)) - stdout_thread.setDaemon(True) - stdout_thread.start() - if self.stderr: - stderr = [] - stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr)) - stderr_thread.setDaemon(True) - stderr_thread.start() - - if self.stdin: - if input is not None: - self.stdin.write(input) - self.stdin.close() - - if self.stdout: - stdout_thread.join() - if self.stderr: - stderr_thread.join() - - if stdout is not None: - stdout = stdout[0] - if stderr is not None: - stderr = stderr[0] - - if self.universal_newlines and hasattr(file, 'newlines'): - if stdout: - stdout = self._translate_newlines(stdout) - if stderr: - stderr = self._translate_newlines(stderr) - - self.wait() - return (stdout, stderr) - - else: - def _get_handles(self, stdin, stdout, stderr): - p2cread, p2cwrite = None, None - c2pread, c2pwrite = None, None - errread, errwrite = None, None - - if stdin is None: - pass - elif stdin == PIPE: - p2cread, p2cwrite = os.pipe() - elif isinstance(stdin, int): - p2cread = stdin - else: - p2cread = stdin.fileno() - - if stdout is None: - pass - elif stdout == PIPE: - c2pread, c2pwrite = os.pipe() - elif isinstance(stdout, int): - c2pwrite = stdout - else: - c2pwrite = stdout.fileno() - - if stderr is None: - pass - elif stderr == PIPE: - errread, errwrite = os.pipe() - elif stderr == STDOUT: - errwrite = c2pwrite - elif isinstance(stderr, int): - errwrite = stderr - else: 
- errwrite = stderr.fileno() - - return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) - - def _set_cloexec_flag(self, fd): - try: - cloexec_flag = fcntl.FD_CLOEXEC - except AttributeError: - cloexec_flag = 1 - - old = fcntl.fcntl(fd, fcntl.F_GETFD) - fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) - - def _close_fds(self, but): - for i in xrange(3, MAXFD): - if i == but: - continue - try: - os.close(i) - except: - pass - - def _execute_child(self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, startupinfo, creationflags, shell, - p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite): - - if isinstance(args, types.StringTypes): - args = [args] - else: - args = list(args) - - if shell: - args = ["/bin/sh", "-c"] + args - - if executable is None: - executable = args[0] - - errpipe_read, errpipe_write = os.pipe() - self._set_cloexec_flag(errpipe_write) - - gc_was_enabled = gc.isenabled() - gc.disable() - try: - self.pid = os.fork() - except: - if gc_was_enabled: - gc.enable() - raise - self._child_created = True - if self.pid == 0: - try: - if p2cwrite: - os.close(p2cwrite) - if c2pread: - os.close(c2pread) - if errread: - os.close(errread) - os.close(errpipe_read) - - if p2cread: - os.dup2(p2cread, 0) - if c2pwrite: - os.dup2(c2pwrite, 1) - if errwrite: - os.dup2(errwrite, 2) - - if p2cread and p2cread not in (0,): - os.close(p2cread) - if c2pwrite and c2pwrite not in (p2cread, 1): - os.close(c2pwrite) - if errwrite and errwrite not in (p2cread, c2pwrite, 2): - os.close(errwrite) - - if close_fds: - self._close_fds(but=errpipe_write) - - if cwd is not None: - os.chdir(cwd) - - if preexec_fn: - apply(preexec_fn) - - if env is None: - os.execvp(executable, args) - else: - os.execvpe(executable, args, env) - - except: - exc_type, exc_value, tb = sys.exc_info() - exc_lines = traceback.format_exception(exc_type, exc_value, tb) - exc_value.child_traceback = ''.join(exc_lines) - os.write(errpipe_write, pickle.dumps(exc_value)) - - os._exit(255) - - if gc_was_enabled: - gc.enable() - os.close(errpipe_write) - if p2cread and p2cwrite: - os.close(p2cread) - if c2pwrite and c2pread: - os.close(c2pwrite) - if errwrite and errread: - os.close(errwrite) - - data = os.read(errpipe_read, 1048576) - os.close(errpipe_read) - if data != "": - os.waitpid(self.pid, 0) - child_exception = pickle.loads(data) - raise child_exception - - def _handle_exitstatus(self, sts): - if os.WIFSIGNALED(sts): - self.returncode = -os.WTERMSIG(sts) - elif os.WIFEXITED(sts): - self.returncode = os.WEXITSTATUS(sts) - else: - raise RuntimeError("Unknown child exit status!") - - def poll(self, _deadstate=None): - if self.returncode is None: - try: - pid, sts = os.waitpid(self.pid, os.WNOHANG) - if pid == self.pid: - self._handle_exitstatus(sts) - except os.error: - if _deadstate is not None: - self.returncode = _deadstate - return self.returncode - - def wait(self): - if self.returncode is None: - pid, sts = os.waitpid(self.pid, 0) - self._handle_exitstatus(sts) - return self.returncode - - def _communicate(self, input): - read_set = [] - write_set = [] - stdout = None - stderr = None - - if self.stdin: - self.stdin.flush() - if input: - write_set.append(self.stdin) - else: - self.stdin.close() - if self.stdout: - read_set.append(self.stdout) - stdout = [] - if self.stderr: - read_set.append(self.stderr) - stderr = [] - - input_offset = 0 - while read_set or write_set: - rlist, wlist, xlist = select.select(read_set, write_set, []) - - if self.stdin in wlist: - bytes_written = 
os.write(self.stdin.fileno(), buffer(input, input_offset, 512)) - input_offset += bytes_written - if input_offset >= len(input): - self.stdin.close() - write_set.remove(self.stdin) - - if self.stdout in rlist: - data = os.read(self.stdout.fileno(), 1024) - if data == "": - self.stdout.close() - read_set.remove(self.stdout) - stdout.append(data) - - if self.stderr in rlist: - data = os.read(self.stderr.fileno(), 1024) - if data == "": - self.stderr.close() - read_set.remove(self.stderr) - stderr.append(data) - - if stdout is not None: - stdout = ''.join(stdout) - if stderr is not None: - stderr = ''.join(stderr) - - if self.universal_newlines and hasattr(file, 'newlines'): - if stdout: - stdout = self._translate_newlines(stdout) - if stderr: - stderr = self._translate_newlines(stderr) - - self.wait() - return (stdout, stderr) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Runner.py a2jmidid-9/wafadmin/Runner.py --- a2jmidid-8~dfsg0/wafadmin/Runner.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Runner.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,370 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005 (ita) - -"Execute the tasks" - -import sys, random, time, threading, Queue, traceback -import Params, Utils -import pproc as subprocess -from Params import debug, error -from Constants import * - -g_quiet = 0 -"do not output anything" - -log_file = None -"output to a config.log file, see Configure.py {mute,restore}_logging" - -def print_log(msg, nl='\n'): - if log_file: - log_file.write(msg) - log_file.write(nl) - log_file.flush() - -def printout(s): - if not g_quiet: - sys.stdout.write(s) - sys.stdout.flush() - print_log(s, nl='') - -def progress_line(state, total, col1, task, col2): - "do not print anything if there is nothing to display" - if Params.g_options.progress_bar == 1: - return Utils.progress_line(state, total, col1, col2) - - if Params.g_options.progress_bar == 2: - try: ini = Params.g_build.ini - except AttributeError: ini = Params.g_build.ini = time.time() - ela = time.strftime('%H:%M:%S', time.gmtime(time.time() - ini)) - ins = ','.join([n.m_name for n in task.m_inputs]) - outs = ','.join([n.m_name for n in task.m_outputs]) - return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (total, state, ins, outs, ela) - - n = len(str(total)) - fs = '[%%%dd/%%%dd] %%s%%s%%s' % (n, n) - return fs % (state, total, col1, task.get_display(), col2) - -def process_cmd_output(proc): - """calling communicate to avoid race-condition between stdout and stderr""" - (cmd_stdout, cmd_stderr) = proc.communicate() - if cmd_stdout: - printout(cmd_stdout) - if cmd_stderr: - if g_quiet: - printout(cmd_stderr) - else: - sys.stderr.write(cmd_stderr) - sys.stderr.flush() - -def _exec_command_normal(s): - "run commands in a portable way the subprocess module backported from python 2.4 and should work on python >= 2.2" - debug("system command -> "+ s, 'runner') - if Params.g_verbose or g_quiet: printout(s+'\n') - # encase the command in double-quotes in windows - if sys.platform == 'win32' and not s.startswith('""'): - s = '"%s"' % s - proc = subprocess.Popen(s, shell=1, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - process_cmd_output(proc) - stat = proc.wait() - if stat & 0xff: return stat | 0x80 - return stat >> 8 - -def _exec_command_interact(s): - "this one is for the latex output, where we cannot capture the output while the process waits for stdin" - 
debug("system command (interact) -> "+ s, 'runner') - if Params.g_verbose or g_quiet: printout(s+'\n') - # encase the command in double-quotes in windows - if sys.platform == 'win32' and not s.startswith('""'): - s = '"%s"' % s - proc = subprocess.Popen(s, shell=1) - stat = proc.wait() - if stat & 0xff: return stat | 0x80 - return stat >> 8 - -if sys.platform == "win32": - old_log = _exec_command_interact - def _exec_commandi_interact(s): - # TODO very long command-lines are unlikely to be used in the configuration - if len(s) < 2000: return old_log(s) - if Params.g_verbose or g_quiet: printout(s+'\n') - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - proc = subprocess.Popen(s, shell=False, startupinfo=startupinfo) - stat = proc.wait() - if stat & 0xff: return stat | 0x80 - return stat >> 8 - -exec_command = _exec_command_interact # python bug on stdout overload -def set_exec(mode): - global exec_command - if mode == 'normal': exec_command = _exec_command_normal - elif mode == 'noredir': exec_command = _exec_command_interact - else: error('set_runner_mode') - -class Serial(object): - def __init__(self, bld): - self.error = 0 - - self.manager = bld.task_manager - - self.outstanding = [] - - self.priolst = [] - - # progress bar - self.total = self.manager.total() - self.processed = 0 - - self.switchflag = 1 # postpone - # self.manager.debug() - - # warning, this one is recursive .. - def get_next(self): - if self.outstanding: - t = self.outstanding.pop(0) - self.processed += 1 - return t - - # handle case where only one wscript exist - # that only install files - if not self.manager.groups: - return None - - (_, self.outstanding) = self.manager.get_next_set() - if not self.outstanding: return None - - if Params.g_verbose: - debug("Preparing to run prio %i tasks: [\n%s\n\t]" % - (1, ',\n'.join(["\t#%i: %s" % (tsk.m_idx, repr(tsk).strip()) - for tsk in self.outstanding])), - 'runner') - return self.get_next() - - def progress(self): - return (self.processed, self.total) - - def postpone(self, tsk): - self.processed -= 1 - # shuffle the list - why it does work is left as an exercise for the reader - self.switchflag *= -1 - if self.switchflag>0: self.outstanding.insert(0, tsk) - else: self.outstanding.append(tsk) - - # TODO FIXME - def debug(self): - debug("debugging a task: something went wrong:", 'runner') - s = " ".join([str(t.m_idx) for t in self.manager]) - debug(s, 'runner') - - # skip a group and report the failure - def skip_group(self): - self.outstanding = [] - - def start(self): - global g_quiet - debug("Serial start called", 'runner') - #self.debug() - while 1: - # get next Task - tsk = self.get_next() - if tsk is None: break - - debug("retrieving #%i (%r)" % (tsk.m_idx, tsk), 'runner') - - # # ======================= - #if tsk.m_hasrun: - # error("task has already run! 
"+str(tsk.m_idx)) - - if not tsk.may_start(): - debug("delaying #"+str(tsk.m_idx), 'runner') - self.postpone(tsk) - #self.debug() - #tsk = None - continue - # # ======================= - - tsk.prepare() - #tsk.debug() - - #debug("m_sig is "+str(tsk.m_sig), 'runner') - #debug("obj output m_sig is "+str(tsk.m_outputs[0].get_sig()), 'runner') - - #continue - if not tsk.must_run(): - tsk.m_hasrun = SKIPPED - self.manager.add_finished(tsk) - #debug("task is up-to_date "+str(tsk.m_idx), 'runner') - continue - - debug("executing #"+str(tsk.m_idx), 'runner') - - # display the command that we are about to run - if not g_quiet: - (s, t) = self.progress() - cl = Params.g_colors - printout(progress_line(s, t, cl[tsk.color()], tsk, cl['NORMAL'])) - - # run the command - ret = tsk.run() - self.manager.add_finished(tsk) - - # non-zero means something went wrong - if ret: - self.error = 1 - tsk.m_hasrun = CRASHED - tsk.err_code = ret - if Params.g_options.keep: continue - else: return -1 - - try: - tsk.update_stat() - except OSError: - traceback.print_stack() - self.error = 1 - tsk.m_hasrun = MISSING - if Params.g_options.keep: continue - else: return -1 - else: - tsk.m_hasrun = SUCCESS - - if self.error: - return -1 - -class TaskConsumer(threading.Thread): - def __init__(self, i, m): - threading.Thread.__init__(self) - self.setDaemon(1) - self.id = i - self.master = m - self.start() - - def run(self): - m = self.master - - while 1: - tsk = m.ready.get() - if m.failed and not m.running: - m.out.put(tsk) - continue - - printout(tsk.get_display()) - ret = tsk.run() - - if ret: - tsk.err_code = ret - tsk.m_hasrun = CRASHED - else: - try: - tsk.update_stat() - except OSError: - tsk.m_hasrun = MISSING - else: - tsk.m_hasrun = SUCCESS - if tsk.m_hasrun != SUCCESS: # TODO for now, do no keep running in parallel and not Params.g_options.keep: - m.failed = 1 - - m.out.put(tsk) - -class Parallel(object): - """ - The following is a small scheduler for making as many tasks available to the consumer threads - It uses the serial shuffling system - """ - def __init__(self, bld, j=2): - - # number of consumers - self.numjobs = j - - self.manager = bld.task_manager - - # progress bar - self.total = self.manager.total() - self.processed = 0 - - # tasks waiting to be processed - IMPORTANT - self.outstanding = [] - self.maxjobs = 100 - - # tasks that are awaiting for another task to complete - self.frozen = [] - - # tasks waiting to be run by the consumers - self.ready = Queue.Queue(0) - self.out = Queue.Queue(0) - - self.count = 0 # tasks not in the producer area - self.failed = 0 # some task has failed - self.running = 0 # keep running ? 
- self.progress = 0 # progress indicator - - def start(self): - self.consumers = [TaskConsumer(i, self) for i in range(self.numjobs)] - - # the current group - #group = None - - def get_out(): - self.manager.add_finished(self.out.get()) - self.count -= 1 - - lastfailput = 0 - - # iterate over all tasks at most one time for each task run - penalty = 0 - currentprio = 0 - #loop=0 - while 1: - #loop += 1 - if self.failed and not self.running: - while self.count > 0: get_out() - if self.failed: return -1 - - if 1 == currentprio: - # allow only one process at a time in priority 'even' - while self.count > 0: get_out() - else: - # not too many jobs in the queue - while self.count > self.numjobs + 10: get_out() - - # empty the returned tasks as much as possible - while not self.out.empty(): get_out() - - if not self.outstanding: - if self.count > 0: get_out() - self.outstanding = self.frozen - self.frozen = [] - if not self.outstanding: - while self.count > 0: get_out() - (currentprio, self.outstanding) = self.manager.get_next_set() - #if self.outstanding: random.shuffle(self.outstanding) - if currentprio is None: break - - # consider the next task - tsk = self.outstanding.pop(0) - if tsk.may_start(): - tsk.prepare() - self.progress += 1 - if not tsk.must_run(): - tsk.m_hasrun = SKIPPED - self.manager.add_finished(tsk) - continue - cl = Params.g_colors - tsk.set_display(progress_line(self.progress, self.total, cl[tsk.color()], tsk, cl['NORMAL'])) - self.count += 1 - self.ready.put(tsk) - else: - if random.randint(0,1): self.frozen.insert(0, tsk) - else: self.frozen.append(tsk) - #print loop - -def get_instance(bld, njobs): - if njobs <= 1: executor = Serial(bld) - else: executor = Parallel(bld, njobs) - return executor - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Scan.py a2jmidid-9/wafadmin/Scan.py --- a2jmidid-8~dfsg0/wafadmin/Scan.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Scan.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,135 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -"Scan for dependencies, compute task signatures" - -from Utils import md5 -import Params -from Params import debug, error -from Constants import * - -g_all_scanners={} -"all instances of scanners" - -class ScannerError(Exception): - pass - -class scanner(object): - "TODO: call this a dependency manager (not a scanner), as it does scan and compute the signatures" - - def __init__(self): - global g_all_scanners - g_all_scanners[self.__class__.__name__] = self - self.vars = [] # additional vars to add in the scanning process - - # ======================================= # - # interface definition - - # this method returns a tuple containing: - # * a list of nodes corresponding to real files - # * a list of names for files not found in path_lst - # the input parameters may have more parameters that the ones used below - def scan(self, tsk, node): - "usually reimplemented" - return ((), ()) - - # scans a node, the task may have additional parameters such as include paths, etc - def do_scan(self, tsk, node): - "more rarely reimplemented" - debug("do_scan(self, node, env, hashparams)", 'ccroot') - - variant = node.variant(tsk.env()) - - if not node: - error("BUG rescanning a null node") - return - - # we delegate the work to "def scan(self, tsk, node)" to avoid duplicate code - (nodes, names) = self.scan(tsk, node) - if Params.g_verbose: - if Params.g_zones: - debug('scanner for %s returned %s %s' % (node.m_name, str(nodes), str(names)), 'deps') - - tree = Params.g_build - tree.node_deps[variant][node.id] = nodes - tree.raw_deps[variant][node.id] = names - - # compute the signature, recompute it if there is no match in the cache - def get_signature(self, tsk): - "the signature obtained may not be the one if the files have changed, we do it in two steps" - tree = Params.g_build - env = tsk.env() - - # get the task signature from the signature cache - node = tsk.m_outputs[0] - variant = node.variant(tsk.env()) - tstamps = tree.m_tstamp_variants[variant] - prev_sig = None - - time = tstamps.get(node.id, None) - if not time is None: - key = hash( (variant, node.m_name, time, self.__class__.__name__) ) - # a tuple contains the task signatures from previous runs - tup = tree.bld_sigs.get(key, ()) - if tup: - prev_sig = tup[1] - if prev_sig != None: - sig = self.get_signature_queue(tsk) - if sig == prev_sig: - return sig - - #print "scanning the file", tsk.m_inputs[0].abspath() - - # some source or some header is dirty, rescan the source files - for node in tsk.m_inputs: - self.do_scan(tsk, node) - - # recompute the signature and return it - sig = self.get_signature_queue(tsk) - - # DEBUG - #print "rescan for ", tsk.m_inputs[0], " is ", rescan, " and deps ", \ - # tree.node_deps[variant][node.id], tree.raw_deps[variant][node.id] - - return sig - - # ======================================= # - # protected methods - override if you know what you are doing - - def get_signature_queue(self, tsk): - "the basic scheme for computing signatures from .cpp and inferred .h files" - tree = Params.g_build - - rescan = 0 - seen = set() - queue = []+tsk.m_inputs - m = md5() - - # additional variables to hash (command-line defines for example) - env = tsk.env() - for x in self.vars: - m.update(str(env[x])) - - # add the hashes of all files entering into the dependency system - while queue: - node = queue.pop(0) - - if node.id in seen: continue - seen.add(node.id) - - variant = node.variant(env) - tree.rescan(node.m_parent) - try: queue += 
tree.node_deps[variant][node.id] - except KeyError: pass - - try: m.update(tree.m_tstamp_variants[variant][node.id]) - except KeyError: return None - - return m.digest() - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Scripting.py a2jmidid-9/wafadmin/Scripting.py --- a2jmidid-8~dfsg0/wafadmin/Scripting.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Scripting.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,598 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005 (ita) - -"Module called for configuring, compiling and installing targets" - -import os, sys, shutil, cPickle, traceback - -import Params, Utils, Configure, Build, Runner, Options -from Params import error, fatal, warning, g_lockfile -from Constants import * - -g_gz = 'bz2' -g_dirwatch = None -g_daemonlock = 0 -g_excludes = '.svn CVS .arch-ids {arch} SCCS BitKeeper .hg'.split() -"exclude folders from dist" -g_dist_exts = '~ .rej .orig .pyc .pyo .bak config.log .tar.bz2 .zip Makefile Makefile.in'.split() -"exclude files from dist" - -g_distclean_exts = '~ .pyc .wafpickle'.split() - -def add_subdir(dir, bld): - "each wscript calls bld.add_subdir" - try: bld.rescan(bld.path) - except OSError: fatal("No such directory "+bld.path.abspath()) - - old = bld.path - new = bld.path.find_dir(dir) - if new is None: - fatal("subdir not found (%s), restore is %s" % (dir, bld.path)) - - bld.path = new - # try to open 'wscript_build' for execution - # if unavailable, open the module wscript and call the build function from it - from Common import install_files, install_as, symlink_as # do not remove - try: - file_path = os.path.join(new.abspath(), WSCRIPT_BUILD_FILE) - file = open(file_path, 'r') - exec file - if file: file.close() - except IOError: - file_path = os.path.join(new.abspath(), WSCRIPT_FILE) - module = Utils.load_module(file_path) - module.build(bld) - - # restore the old node position - bld.path = old - -def call_back(idxName, pathName, event): - #print "idxName=%s, Path=%s, Event=%s "%(idxName, pathName, event) - # check the daemon lock state - global g_daemonlock - if g_daemonlock: return - g_daemonlock = 1 - - # clean up existing variables, and start a new instance - Utils.reset() - main() - g_daemonlock = 0 - -def start_daemon(): - "if it does not exist already:start a new directory watcher; else: return immediately" - global g_dirwatch - if not g_dirwatch: - import DirWatch - g_dirwatch = DirWatch.DirectoryWatcher() - m_dirs=[] - for nodeDir in Params.g_build.m_srcnode.dirs(): - tmpstr = "%s" %nodeDir - tmpstr = "%s" %(tmpstr[6:]) - m_dirs.append(tmpstr) - g_dirwatch.add_watch("tmp Test", call_back, m_dirs) - # infinite loop, no need to exit except on ctrl+c - g_dirwatch.loop() - g_dirwatch = None - else: - g_dirwatch.suspend_all_watch() - m_dirs=[] - for nodeDir in Params.g_build.m_srcnode.dirs(): - tmpstr = "%s" % nodeDir - tmpstr = "%s" % (tmpstr[6:]) - m_dirs.append(tmpstr) - g_dirwatch.add_watch("tmp Test", call_back, m_dirs) - -def configure(): - # disable parallelization while configuring - jobs_save = Params.g_options.jobs - Params.g_options.jobs = 1 - - Runner.set_exec('normal') - tree = Build.Build() - - err = 'The %s is not given in %s:\n * define a top level attribute named "%s"\n * run waf configure --%s=xxx' - - src = getattr(Params.g_options, SRCDIR, None) - if not src: src = getattr(Utils.g_module, SRCDIR, None) - if not src: fatal(err % (SRCDIR, os.path.abspath('.'), SRCDIR, 
SRCDIR)) - - bld = getattr(Params.g_options, BLDDIR, None) - if not bld: bld = getattr(Utils.g_module, BLDDIR, None) - if not bld: fatal(err % (BLDDIR, os.path.abspath('.'), BLDDIR, BLDDIR)) - - Params.g_cachedir = os.path.join(bld, CACHE_DIR) - tree.load_dirs(src, bld, isconfigure=1) - tree.init_variants() - - conf = Configure.Configure(srcdir=src, blddir=bld) - - # first remove the log file if it exists - try: os.unlink(os.path.join(bld, Configure.Configure.log_file)) - except (OSError, IOError): pass - - conf.mute_logging() - try: - # calling to main wscript's configure() - conf.sub_config('') - except Configure.ConfigurationError, e: - fatal(str(e), 2) - except Exception: - Utils.test_full() - raise - conf.restore_logging() - - conf.store(tree) - conf.cleanup() - - # this will write a configure lock so that subsequent run will - # consider the current path as the root directory, to remove: use 'waf distclean' - file = open(g_lockfile, 'w') - file.write - - proj = {} - proj[BLDDIR] = bld - proj[SRCDIR] = src - proj['argv'] = sys.argv - proj['hash'] = conf.hash - proj['files'] = conf.files - cPickle.dump(proj, file) - file.close() - - # restore -j option - Params.g_options.jobs = jobs_save - -def read_cache_file(filename): - file = open(filename, 'r') - proj = cPickle.load(file) - file.close() - return proj - -def prepare(): - # some command-line options can be processed immediately - if '--version' in sys.argv: - opt_obj = Options.Handler() - opt_obj.parse_args() - sys.exit(0) - - # now find the wscript file - msg1 = 'Waf: *** Nothing to do! Please run waf from a directory containing a file named "%s"' % WSCRIPT_FILE - - # Some people want to configure their projects gcc-style: - # mkdir build && cd build && ../waf configure && ../waf - # check that this is really what is wanted - build_dir_override = None - candidate = None - - cwd = Params.g_cwd_launch - lst = os.listdir(cwd) - xml = 0 - - #check if a wscript or a wscript_xml file is in current directory - if WSCRIPT_FILE in lst or WSCRIPT_BUILD_FILE in lst or 'wscript_xml' in lst: - # if a script is in current directory, use this directory as candidate (and prevent gcc-like configuration) - candidate = cwd - elif 'configure' in sys.argv: - # gcc-like configuration - build_dir_override = cwd - - try: - #check the following dirs for wscript or wscript_xml - search_for_candidate = True - if not candidate: - #check first the calldir if there is wscript or wscript_xml - #for example: /usr/src/configure the calldir would be /usr/src - calldir = os.path.abspath(os.path.dirname(sys.argv[0])) - lst_calldir = os.listdir(calldir) - if WSCRIPT_FILE in lst_calldir: - candidate = calldir - search_for_candidate = False - if 'wscript_xml' in lst_calldir: - candidate = calldir - xml = 1 - search_for_candidate = False - if "--make-waf" in sys.argv and candidate: - search_for_candidate = False - - #check all directories above current dir for wscript or wscript_xml if still not found - while search_for_candidate: - if len(cwd) <= 3: - break # stop at / or c: - dirlst = os.listdir(cwd) - if WSCRIPT_FILE in dirlst: - candidate = cwd - xml = 0 - if 'wscript_xml' in dirlst: - candidate = cwd - xml = 1 - break - if 'configure' in sys.argv and candidate: - break - if Params.g_lockfile in dirlst: - break - cwd = cwd[:cwd.rfind(os.sep)] # climb up - except Exception: - traceback.print_stack() - fatal(msg1) - - if not candidate: - # check if the user only wanted to display the help - if '-h' in sys.argv or '--help' in sys.argv: - warning('No wscript file found: 
the help message may be incomplete') - opt_obj = Options.Handler() - opt_obj.parse_args() - sys.exit(0) - else: - fatal(msg1) - - # We have found wscript, but there is no guarantee that it is valid - os.chdir(candidate) - - if xml: - # the xml module is not provided by default, you will have to import it yourself - from XMLScripting import compile - compile(candidate+os.sep+'wscript_xml') - else: - # define the main module containing the functions init, shutdown, .. - Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE)) - - if build_dir_override: - d = getattr(Utils.g_module, BLDDIR, None) - if d: - # test if user has set the blddir in wscript. - msg = 'Overriding build directory %s with %s' % (d, build_dir_override) - Params.niceprint(msg, 'WARNING', 'waf') - Utils.g_module.blddir = build_dir_override - - # fetch the custom command-line options recursively and in a procedural way - opt_obj = Options.Handler() - # will call to main wscript's set_options() - opt_obj.sub_options('') - opt_obj.parse_args() - - # use the parser results - if Params.g_commands['dist']: - # try to use the user-defined dist function first, fallback to the waf scheme - fun = getattr(Utils.g_module, 'dist', None) - if fun: fun(); sys.exit(0) - - appname = getattr(Utils.g_module, APPNAME, 'noname') - - get_version = getattr(Utils.g_module, 'get_version', None) - if get_version: version = get_version() - else: version = getattr(Utils.g_module, VERSION, None) - if not version: version = '1.0' - - from Scripting import Dist - Dist(appname, version) - sys.exit(0) - elif Params.g_commands['distclean']: - # try to use the user-defined distclean first, fallback to the waf scheme - fun = getattr(Utils.g_module, 'distclean', None) - if fun: fun() - else: DistClean() - sys.exit(0) - elif Params.g_commands['distcheck']: - # try to use the user-defined dist function first, fallback to the waf scheme - fun = getattr(Utils.g_module, 'dist', None) - if fun: fun(); sys.exit(0) - - appname = getattr(Utils.g_module, APPNAME, 'noname') - - get_version = getattr(Utils.g_module, 'get_version', None) - if get_version: version = get_version() - else: version = getattr(Utils.g_module, VERSION, None) - if not version: version = '1.0' - - DistCheck(appname, version) - sys.exit(0) - - fun = getattr(Utils.g_module, 'init', None) - if fun: fun() - - - main() - -def main(): - import inspect - if Params.g_commands['configure']: - configure() - Params.pprint('GREEN', 'Configuration finished successfully; project is now ready to build.') - sys.exit(0) - - Runner.set_exec('noredir') - - # compile the project and/or install the files - bld = Build.Build() - try: - proj = read_cache_file(g_lockfile) - except IOError: - if Params.g_commands['clean']: - fatal("Nothing to clean (project not configured)", ret=2) - else: - if Params.g_autoconfig: - warning("Reconfiguring the project") - configure() - bld = Build.Build() - proj = read_cache_file(g_lockfile) - else: - fatal("Project not configured (run 'waf configure' first)", ret=2) - - if Params.g_autoconfig: - reconf = 0 - hash = 0 - try: - for file in proj['files']: - mod = Utils.load_module(file) - hash = Params.hash_function_with_globals(hash, mod.configure) - reconf = (hash != proj['hash']) - except Exception, ex: - if Params.g_verbose: - traceback.print_exc() - warning("Reconfiguring the project (an exception occured: %s)" % (str(ex),)) - reconf = 1 - - if reconf: - warning("Reconfiguring the project (the configuration has changed)") - - a1 = Params.g_commands - a2 = Params.g_options - a3 = 
Params.g_zones - a4 = Params.g_verbose - - oldargs = sys.argv - sys.argv = proj['argv'] - Options.g_parser.parse_args(args=sys.argv[1:]) - configure() - sys.argv = oldargs - - Params.g_commands = a1 - Params.g_options = a2 - Params.g_zones = a3 - Params.g_verbose = a4 - - bld = Build.Build() - proj = read_cache_file(g_lockfile) - - Params.g_cachedir = os.path.join(proj[BLDDIR], CACHE_DIR) - - bld.load_dirs(proj[SRCDIR], proj[BLDDIR]) - bld.load_envs() - - try: - # calling to main wscript's build() - f = Utils.g_module.build - except AttributeError: - fatal("Could not find the function 'def build(bld):' in wscript") - else: - f(bld) - - # TODO undocumented hook - pre_build = getattr(Utils.g_module, 'pre_build', None) - if pre_build: pre_build() - - # compile - if Params.g_commands['build'] or Params.g_install: - try: - - # TODO quite ugly, no? - if not Params.g_commands['build'] and not Params.g_commands['install']: - import Task - def must_run(self): - return 0 - setattr(Task.Task, 'must_run', must_run) - - #""" - bld.compile() - """ - import cProfile, pstats - cProfile.run("Params.g_build.compile()", 'profi.txt') - p = pstats.Stats('profi.txt') - p.sort_stats('time').print_stats(20) - #""" - - except Build.BuildError, e: - if not Params.g_options.daemon: fatal(e.get_message(), 1) - else: error(e.get_message()) - else: - if Params.g_options.progress_bar: print '' - - if Params.g_install: - bld.install() - - if Params.g_commands['install']: msg = 'Compilation and installation finished successfully' - elif Params.g_commands['uninstall']: msg = 'Uninstallation finished successfully' - else: msg = 'Compilation finished successfully' - Params.pprint('GREEN', msg) - - # clean - if Params.g_commands['clean']: - try: - bld.clean() - Params.pprint('GREEN', 'Cleaning finished successfully') - finally: - bld.save() - #if ret: - # msg='Cleanup failed for a mysterious reason' - # error(msg) - - # daemon look here - if Params.g_options.daemon and Params.g_commands['build']: - start_daemon() - return - - # shutdown - fun = getattr(Utils.g_module, 'shutdown', None) - if fun: fun() - - -## Note: this is a modified version of shutil.copytree from python -## 2.5.2 library; modified for WAF purposes to exclude dot dirs and -## another list of files. -def copytree(src, dst, symlinks=False, excludes=(), build_dir=None): - names = os.listdir(src) - os.makedirs(dst) - errors = [] - for name in names: - srcname = os.path.join(src, name) - dstname = os.path.join(dst, name) - try: - if symlinks and os.path.islink(srcname): - linkto = os.readlink(srcname) - os.symlink(linkto, dstname) - elif os.path.isdir(srcname): - if name in excludes: - continue - elif name.startswith('.') or name.startswith(',,') or name.startswith('++'): - continue - elif name == build_dir: - continue - else: - ## build_dir is not passed into the recursive - ## copytree, but that is intentional; it is a - ## directory name valid only at the top level. - copytree(srcname, dstname, symlinks, excludes) - else: - ends = name.endswith - to_remove = False - if name.startswith('.') or name.startswith('++'): - to_remove = True - else: - for x in g_dist_exts: - if ends(x): - to_remove = True - break - if not to_remove: - shutil.copy2(srcname, dstname) - # XXX What about devices, sockets etc.? 
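# Aside (illustrative sketch, not part of the removed file): the
# exclusion behaviour of this hand-rolled copytree can be approximated
# on modern Python with shutil.ignore_patterns; the source/destination
# paths and the pattern list here are assumed values, not waf API.
import shutil

shutil.copytree('src', 'dist-tmp', symlinks=False,
                ignore=shutil.ignore_patterns('.svn', 'CVS', '.hg',
                                              '*.pyc', '*.orig', '*~'))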
- except (IOError, os.error), why: - errors.append((srcname, dstname, str(why))) - # catch the Error from the recursive copytree so that we can - # continue with other files - except shutil.Error, err: - errors.extend(err.args[0]) - try: - shutil.copystat(src, dst) - except WindowsError: - # can't copy file access times on Windows - pass - except OSError, why: - errors.extend((src, dst, str(why))) - if errors: - raise shutil.Error, errors - - -def DistDir(appname, version): - "make a distribution directory with all the sources in it" - - # Our temporary folder where to put our files - TMPFOLDER=appname+'-'+version - - # Remove an old package directory - if os.path.exists(TMPFOLDER): shutil.rmtree(TMPFOLDER) - - global g_dist_exts, g_excludes - - # Remove the Build dir - build_dir = getattr(Utils.g_module, BLDDIR, None) - - # Copy everything into the new folder - copytree('.', TMPFOLDER, excludes=g_excludes, build_dir=build_dir) - - # TODO undocumented hook - dist_hook = getattr(Utils.g_module, 'dist_hook', None) - if dist_hook: - os.chdir(TMPFOLDER) - try: - dist_hook() - finally: - # go back to the root directory - os.chdir('..') - return TMPFOLDER - -def DistTarball(appname, version): - """make a tarball with all the sources in it; return (distdirname, tarballname)""" - import tarfile - - TMPFOLDER = DistDir(appname, version) - tar = tarfile.open(TMPFOLDER+'.tar.'+g_gz,'w:'+g_gz) - tar.add(TMPFOLDER) - tar.close() - Params.pprint('GREEN', 'Your archive is ready -> %s.tar.%s' % (TMPFOLDER, g_gz)) - - if os.path.exists(TMPFOLDER): shutil.rmtree(TMPFOLDER) - return (TMPFOLDER, TMPFOLDER+'.tar.'+g_gz) - -def Dist(appname, version): - """make a tarball with all the sources in it""" - DistTarball(appname, version) - sys.exit(0) - -def DistClean(): - """clean the project""" - - # remove the temporary files - # the builddir is given by lock-wscript only - # we do no try to remove it if there is no lock file (rmtree) - for (root, dirs, filenames) in os.walk('.'): - for f in list(filenames): - to_remove = 0 - if f==g_lockfile: - # removes a lock, and the builddir indicated - to_remove = True - try: - proj = read_cache_file(os.path.join(root, f)) - shutil.rmtree(os.path.join(root, proj[BLDDIR])) - except (OSError, IOError): - # ignore errors if the lockfile or the builddir not exist. 
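# Aside (illustrative sketch): stripped of the lock-file handling, the
# distclean scan around this point reduces to walking the tree and
# unlinking files whose names end in one of the distclean extensions;
# str.endswith accepts a tuple, so the whole test is a single call.
# The extension tuple below mirrors g_distclean_exts but is hard-coded.
import os

for root, dirs, files in os.walk('.'):
    for f in files:
        if f.endswith(('~', '.pyc', '.wafpickle')):
            os.remove(os.path.join(root, f))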
- pass - else: - ends = f.endswith - for x in g_distclean_exts: - if ends(x): - to_remove = 1 - break - if to_remove: - os.remove(os.path.join(root, f)) - lst = os.listdir('.') - for f in lst: - if f.startswith('.waf-'): - shutil.rmtree(f, ignore_errors=True) - Params.pprint('GREEN', "distclean finished successfully") - sys.exit(0) - -def DistCheck(appname, version): - """Makes some sanity checks on the waf dist generated tarball""" - import tempfile - import pproc as subprocess - - waf = os.path.abspath(sys.argv[0]) - distdir, tarball = DistTarball(appname, version) - retval = subprocess.Popen('bzip2 -dc %s | tar x' % tarball, shell=True).wait() - if retval: - Params.fatal('uncompressing the tarball failed with code %i' % (retval)) - - instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version)) - cwd_before = os.getcwd() - os.chdir(distdir) - try: - retval = subprocess.Popen( - '%(waf)s configure && %(waf)s ' - '&& %(waf)s check && %(waf)s install --destdir=%(instdir)s' - ' && %(waf)s uninstall --destdir=%(instdir)s' % vars(), - shell=True).wait() - if retval: - Params.fatal('distcheck failed with code %i' % (retval)) - finally: - os.chdir(cwd_before) - shutil.rmtree(distdir) - if os.path.exists(instdir): - Params.fatal("distcheck succeeded, but files were left in %s" % (instdir)) - else: - Params.pprint('GREEN', "distcheck finished successfully") - - diff -Nru a2jmidid-8~dfsg0/wafadmin/TaskGen.py a2jmidid-9/wafadmin/TaskGen.py --- a2jmidid-8~dfsg0/wafadmin/TaskGen.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/TaskGen.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,572 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -""" -The class task_gen encapsulates the creation of task objects (low-level code) -The instances can have various parameters, but the creation of task nodes (Task.py) -is delayed. To achieve this, various methods are called from the method "apply" - -The class task_gen contains lots of methods, and a configuration table: -* the methods to call (self.meths) can be specified dynamically (removing, adding, ..) 
-* the order of the methods (self.prec or by default task_gen.prec) is configurable -* new methods can be inserted dynamically without pasting old code - -Additionally, task_gen provides the method apply_core -* file extensions are mapped to methods: def meth(self, name_or_node) -* if a mapping is not found in self.mappings, it is searched in task_gen.mappings -* when called, the functions may modify self.allnodes to re-add source to process -* the mappings can map an extension or a filename (see the code below) - -WARNING: subclasses must reimplement the clone method -""" - -import os, types, traceback, copy -import Params, Task, Common, Utils -from Params import debug, error, fatal - -typos = { -'sources':'source', -'targets':'target', -'include':'includes', -'define':'defines', -'importpath':'importpaths', -'install_var':'inst_var', -'install_subdir':'inst_dir', -'m_type_initials':'link', -} - -g_allobjs = [] -"contains all objects, provided they are created (not in distclean or in dist)" -#TODO part of the refactoring to eliminate the static stuff (Utils.reset) - -g_name_to_obj = {} - -def name_to_obj(name): - global g_name_to_obj - if not g_name_to_obj: - for x in g_allobjs: - if x.name: - g_name_to_obj[x.name] = x - elif not x.target in g_name_to_obj.keys(): - g_name_to_obj[x.target] = x - return g_name_to_obj.get(name, None) - -def flush(all=1): - "object instances under the launch directory create the tasks now" - global g_allobjs - global g_name_to_obj - - # force the initialization of the mapping name->object in flush - # name_to_obj can be used in userland scripts, in that case beware of incomplete mapping - g_name_to_obj = {} - name_to_obj(None) - - tree = Params.g_build - debug("delayed operation TaskGen.flush() called", 'object') - - # post only objects below a particular folder (recursive make behaviour) - launch_dir_node = tree.m_root.find_dir(Params.g_cwd_launch) - if launch_dir_node.is_child_of(tree.m_bldnode): - launch_dir_node = tree.m_srcnode - if not launch_dir_node.is_child_of(tree.m_srcnode): - launch_dir_node = tree.m_srcnode - - if Params.g_options.compile_targets: - debug('posting objects listed in compile_targets', 'object') - - # ensure the target names exist, fail before any post() - targets_objects = {} - for target_name in Params.g_options.compile_targets.split(','): - # trim target_name (handle cases when the user added spaces to targets) - target_name = target_name.strip() - targets_objects[target_name] = name_to_obj(target_name) - if all and not targets_objects[target_name]: fatal("target '%s' does not exist" % target_name) - - for target_obj in targets_objects.values(): - if target_obj and not target_obj.m_posted: - target_obj.post() - else: - debug('posting objects (normal)', 'object') - for obj in g_allobjs: - if launch_dir_node and not obj.path.is_child_of(launch_dir_node): continue - if not obj.m_posted: obj.post() - -class register_obj(type): - """no decorators for classes, so we use a metaclass - we store into task_gen.classes the classes that inherit task_gen - and whose names end in 'obj' - """ - def __init__(cls, name, bases, dict): - super(register_obj, cls).__init__(name, bases, dict) - name = cls.__name__ - if name != 'task_gen' and not name.endswith('_abstract'): - task_gen.classes[name.replace('_taskgen', '')] = cls - -class task_gen(object): - """ - Most methods are of the form 'def meth(self):' without any parameters - there are many of them, and they do many different things: - * task creation - * task results installation - * 
environment modification - * attribute addition/removal - - The inheritance approach is complicated - * mixing several languages at once - * subclassing is needed even for small changes - * inserting new methods is complicated - - This new class uses a configuration table: - * adding new methods easily - * obtaining the order in which to call the methods - * postponing the method calls (post() -> apply) - - Additionally, a 'traits' static attribute is provided: - * this list contains methods - * the methods can remove or add methods from self.meths - Example1: the attribute 'staticlib' is set on an instance - a method set in the list of traits is executed when the - instance is posted, it finds that flag and adds another method for execution - Example2: a method set in the list of traits finds the msvc - compiler (from self.env['MSVC']==1); more methods are added to self.meths - """ - - __metaclass__ = register_obj - mappings = {} - mapped = {} - prec = {} - traits = {} - classes = {} - idx = {} - - def __init__(self, *kw, **kwargs): - self.prec = {} - "map precedence of function names to call" - # so we will have to play with directed acyclic graphs - # detect cycles, etc - - self.source = '' - self.target = '' - - # list of methods to execute - in general one does not touch it by hand - self.meths = set() - - # list of mappings extension -> function - self.mappings = {} - - # list of features (see the documentation on traits) - self.features = list(kw) - - # not always a good idea - self.m_tasks = [] - - self.chmod = 0644 - self._inst_var = '' - self._inst_dir = '' - - if Params.g_install: - self.inst_files = [] # lazy list of tuples representing the files to install - - # kind of private, beware of what you put in it, also, the contents are consumed - self.allnodes = [] - - self.env = Params.g_build.env().copy() - - self.m_posted = 0 - self.path = Params.g_build.path # emulate chdir when reading scripts - self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity) - g_allobjs.append(self) - - - # provide a unique id - self.idx = task_gen.idx[self.path.id] = task_gen.idx.get(self.path.id, 0) + 1 - - for key in kwargs: - setattr(self, key, kwargs[key]) - - def __str__(self): - return ("" - % (self.name or self.target, self.__class__.__name__, str(self.path))) - - def __setattr__(self, name, attr): - real = typos.get(name, name) - if real != name: - Params.warning('typo %s -> %s' % (name, real)) - if Params.g_verbose > 0: - traceback.print_stack() - object.__setattr__(self, real, attr) - - def to_list(self, value): - "helper: returns a list" - if type(value) is types.StringType: return value.split() - else: return value - - def addflags(self, var, value): - "utility function add self.cxxflags -> env['CXXFLAGS']" - self.env.append_value(var, self.to_list(value)) - - def add_method(self, name): - "add a method to execute" - # TODO adding functions ? 
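# Aside (standalone sketch, hypothetical class name): the __setattr__
# override above routes misspelled attribute names through the
# module-level `typos` map before storing the value, which is easiest
# to see in isolation:
typos = {'sources': 'source', 'targets': 'target'}

class AliasedAttrs(object):
    def __setattr__(self, name, value):
        real = typos.get(name, name)
        if real != name:
            print('typo %s -> %s' % (name, real))
        object.__setattr__(self, real, value)

obj = AliasedAttrs()
obj.sources = ['main.c']   # warns, then stores under obj.source
assert obj.source == ['main.c']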
- self.meths.append(name) - - def install(self): - # FIXME - # ambiguity with the install functions - # it is often better to install the targets right after they are up-to_date - # but this means attaching the install to the task objects - if not Params.g_install: return - for (name, var, dir, chmod) in self.inst_files: - print name, var, dir, chmod - - # TODO ugly code - def install_results(self, var, subdir, task, chmod=0644): - debug('install results called', 'object') - if not task: return - current = Params.g_build.path - lst = [a.relpath_gen(current) for a in task.m_outputs] - Common.install_files(var, subdir, lst, chmod=chmod, env=self.env) - - def meth_order(self, *k): - "this one adds the methods to the list of methods" - assert(len(k) > 1) - n = len(k) - 1 - for i in xrange(n): - f1 = k[i] - f2 = k[i+1] - try: self.prec[f2].append(f1) - except: self.prec[f2] = [f1] - if not f1 in self.meths: self.meths.append(f1) - if not f2 in self.meths: self.meths.append(f2) - - def apply_core(self): - # get the list of folders to use by the scanners - # all our objects share the same include paths anyway - lst = self.to_list(self.source) - - # Validation: sources specified somehow - # one can set self.source to None to avoid apply_core() - if not lst is None: - # sources can be supplied either by self.source or self.allnodes - if len(lst) == 0 and not self.allnodes: - fatal("no sources were specified for '%s'" % self.name) - - find_resource_lst = self.path.find_resource_lst - - for filename in lst: - # if self.mappings or task_gen.mappings contains a file of the same name - x = self.get_hook(filename) - if x: - x(self, filename) - else: - node = find_resource_lst(Utils.split_path(filename)) - if not node: fatal("source not found: %s in %s" % (filename, str(self.path))) - self.allnodes.append(node) - - for node in self.allnodes: - # self.mappings or task_gen.mappings map the file extension to a function - filename = node.m_name - k = max(0, filename.rfind('.')) - x = self.get_hook(filename[k:]) - - if not x: - raise TypeError, "Do not know how to process %s in %s, mappings are %s" % \ - (str(node), str(self.__class__), str(self.__class__.mappings)) - x(self, node) - - def apply(self): - "order the methods to execute using self.prec or task_gen.prec" - keys = self.meths - - # add the methods listed in the features - for x in self.features: - keys.update(task_gen.traits.get(x, ())) - - # copy the precedence table - prec = {} - prec_tbl = self.prec or task_gen.prec - for x in prec_tbl: - if x in keys: - prec[x] = prec_tbl[x] - - # elements disconnected - tmp = [] - for a in prec: - for x in prec.values(): - if a in x: break - else: - tmp.append(a) - - # topological sort - out = [] - while tmp: - e = tmp.pop() - if e in keys: out.append(e) - try: - nlst = prec[e] - except KeyError: - pass - else: - del prec[e] - for x in nlst: - for y in prec: - if x in prec[y]: - break - else: - tmp.append(x) - - if prec: fatal("graph has a cycle %s" % str(prec)) - out.reverse() - self.meths = out - - if not out: out.append(self.apply_core.__name__) - - # then we run the methods in order - debug("posting %s %d" % (self, id(self)), 'task_gen') - for x in out: - v = self.get_meth(x) - debug("-> %s (%d)" % (x, id(self)), 'task_gen') - v() - - def post(self): - "runs the code to create the tasks, do not subclass" - if not self.name: self.name = self.target - - if self.m_posted: - error("OBJECT ALREADY POSTED") - return - self.apply() - debug("posted %s" % self.name, 'object') - self.m_posted = 1 - - def 
get_hook(self, ext): - try: return self.mappings[ext] - except KeyError: - try: return task_gen.mappings[ext] - except KeyError: return None - - def get_meth(self, name): - try: - return getattr(self, name) - except AttributeError: - raise AttributeError, "tried to retrieve %s which is not a valid method" % name - - def create_task(self, name, env=None, nice=None): - task = Task.g_task_types[name](name, env or self.env) - if nice: task.prio = nice - self.m_tasks.append(task) - return task - - def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]): - "subclass if necessary" - lst = [] - - #make sure dirnames is a list helps with dirnames with spaces - dirnames = self.to_list(dirnames) - - ext_lst = exts or self.mappings.keys() + task_gen.mappings.keys() - - # FIXME the following two lines should be removed - try: ext_lst += self.s_default_ext - except AttributeError: pass - - for name in dirnames: - anode = self.path.find_dir(name) - - # validation: - # * don't use absolute path. - # * don't use paths outside the source tree. - if not anode or not anode.is_child_of(Params.g_build.m_srcnode): - fatal("Unable to use '%s' - either because it's not a relative path" \ - ", or it is not a child of '%s'." % (name, Params.g_build.m_srcnode)) - - Params.g_build.rescan(anode) - - for name in Params.g_build.cache_dir_contents[anode.id]: - (base, ext) = os.path.splitext(name) - if ext in ext_lst and not name in lst and not name in excludes: - lst.append((anode.relative_path(self.path) or '.') + os.path.sep + name) - - lst.sort() - self.source = self.to_list(self.source) - if not self.source: self.source = lst - else: self.source += lst - - def clone(self, env): - "" - newobj = task_gen() - for x in self.__dict__: - if x in ["env"]: - continue - elif x in ["path", "features"]: - setattr(newobj, x, getattr(self, x)) - else: - setattr(newobj, x, copy.copy(getattr(self, x))) - - newobj.__class__ = self.__class__ - if type(env) is types.StringType: - newobj.env = Params.g_build.m_allenvs[env].copy() - else: - newobj.env = env.copy() - - g_allobjs.append(newobj) - - return newobj - - def get_inst_var(self): - "return a default parameter if provided" - k = self._inst_var - if k == 0: return k - if not k: return getattr(self, "inst_var_default", k) - return k - - def set_inst_var(self, val): - self._inst_var = val - - inst_var = property(get_inst_var, set_inst_var) - - def get_inst_dir(self): - "return a default parameter if provided" - k = self._inst_dir - if k == 0: return k - if not k: return getattr(self, "inst_dir_default", k) - return k - - def set_inst_dir(self, val): - self._inst_dir = val - - inst_dir = property(get_inst_dir, set_inst_dir) - -def declare_extension(var, func): - if type(var) is types.ListType: - for x in var: - task_gen.mappings[x] = func - elif type(var) is types.StringType: - task_gen.mappings[var] = func - else: - raise TypeError('declare extension takes either a list or a string %s' % str(var)) - task_gen.mapped[func.__name__] = func - -def declare_order(*k): - assert(len(k) > 1) - n = len(k) - 1 - for i in xrange(n): - f1 = k[i] - f2 = k[i+1] - try: - if not f1 in task_gen.prec[f2]: task_gen.prec[f2].append(f1) - except: - task_gen.prec[f2] = [f1] - -def declare_chain(name='', action='', ext_in=[], ext_out='', reentrant=1, color='BLUE', prio=40, install=0, decider=None): - """ - see Tools/flex.py for an example - while i do not like such wrappers, some people really do - """ - - if type(action) == types.StringType: - act = Task.simple_task_type(name, action, color=color, 
prio=prio) - act.in_exts = tuple(Utils.to_list(ext_in)) - act.out_exts = tuple(Utils.to_list(ext_out)) - else: - name = action.name - - def x_file(self, node): - if decider: - ext = decider(self, node) - else: - ext = ext_out - - if type(ext) == types.StringType: - out_source = node.change_ext(ext) - if reentrant: - self.allnodes.append(out_source) - elif type(ext) == types.ListType: - out_source = [node.change_ext(x) for x in ext] - if reentrant: - for i in xrange(reentrant): - self.allnodes.append(out_source[i]) - else: - fatal("do not know how to process %s" % str(ext)) - - tsk = self.create_task(name) - tsk.set_inputs(node) - tsk.set_outputs(out_source) - - if Params.g_install and install: - tsk.install = install - - declare_extension(ext_in, x_file) - -def add_feature(name, methods): - lst = Utils.to_list(methods) - try: - l = task_gen.traits[name] - except KeyError: - l = set() - task_gen.traits[name] = l - l.update(lst) - -""" -All the following decorators are registration decorators, i.e add an attribute to current class - (task_gen and its derivatives), with same name as func, which points to func itself. -For example: - @taskgen - def sayHi(self): - print "hi" -Now taskgen.sayHi() may be called -""" -def taskgen(func): - setattr(task_gen, func.__name__, func) - -def feature(*k): - def deco(func): - for name in k: - try: - l = task_gen.traits[name] - except KeyError: - l = set() - task_gen.traits[name] = l - l.update([func.__name__]) - return func - return deco - -def before(fun_name): - def deco(func): - try: - if not func.__name__ in task_gen.prec[fun_name]: task_gen.prec[fun_name].append(func.__name__) - except KeyError: - task_gen.prec[fun_name] = [func.__name__] - return func - return deco - -def after(fun_name): - def deco(func): - try: - if not fun_name in task_gen.prec[func.__name__]: task_gen.prec[func.__name__].append(fun_name) - except KeyError: - task_gen.prec[func.__name__] = [fun_name] - return func - return deco - -def extension(var): - if type(var) is types.ListType: - pass - elif type(var) is types.StringType: - var = [var] - else: - raise TypeError('declare extension takes either a list or a string %s' % str(var)) - - def deco(func): - for x in var: - task_gen.mappings[x] = func - task_gen.mapped[func.__name__] = func - return func - return deco - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Task.py a2jmidid-9/wafadmin/Task.py --- a2jmidid-8~dfsg0/wafadmin/Task.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Task.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,808 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -""" -Running tasks in parallel is a simple problem, but in practice it is more complicated: -* dependencies discovered during the build (dynamic task creation) -* dependencies discovered after files are compiled -* the amount of tasks and dependencies (graph size) can be huge - -This is why the dependency management is split on three different levels: -1. groups of tasks that run all after another group of tasks -2. groups of tasks that can be run in parallel -3. 
tasks that can run in parallel, but with possible unknown ad-hoc dependencies - -The point #1 represents a strict sequential order between groups of tasks, for example a compiler is produced -and used to compile the rest, whereas #2 and #3 represent partial order constraints where #2 applies to the kind of task -and #3 applies to the task instances. - -#1 is held by the task manager (ordered list of TaskGroups) -#2 is held by the task groups (constraint extraction and topological sort) and the actions (priorities) -#3 is held by the tasks individually (attribute m_run_after), - and the scheduler (Runner.py) use Task::may_start to reorder the tasks - - -To simplify the system a little bit, the part #2 only applies to dependencies between actions, -and priorities or order constraints can only be applied to actions, not to tasks anymore - - -To try, use something like this in your code: -import Constants, Task -Task.g_algotype = Constants.MAXPARALLEL -Task.g_shuffle = True -""" - -import os, types, shutil, sys, re, new, random -from Utils import md5 -import Params, Action, Runner, Common, Scan -from Params import debug, error, warning -from Constants import * - -g_algotype = NORMAL -#g_algotype = JOBCONTROL -#g_algotype = MAXPARALLEL - -g_shuffle = False - -g_task_types = {} - -""" -Enable different kind of dependency algorithms: -1 make groups: first compile all cpps and then compile all links (NORMAL) -2 parallelize all (each link task run after its dependencies) (MAXPARALLEL) -3 like 1 but provide additional constraints for the parallelization (MAXJOBS) - -In theory 1. will be faster than 2 for waf, but might be slower for builds -The scheme 2 will not allow for running tasks one by one so it can cause disk thrashing on huge builds - -""" - -class TaskManager(object): - """The manager is attached to the build object, it holds a list of TaskGroup - Each TaskGroup contains a map(priority, list of tasks)""" - def __init__(self): - self.groups = [] - self.idx = 0 # task counter, for debugging (allocating 5000 integers for nothing is a bad idea but well) - self.tasks_done = [] - - self.current_group = 0 - - def get_next_set(self): - """return the next set of tasks to execute - the first parameter is the maximum amount of parallelization that may occur""" - ret = None - while not ret and self.current_group < len(self.groups): - ret = self.groups[self.current_group].get_next_set() - if ret: return ret - else: self.current_group += 1 - return (None, None) - - def add_group(self, name=''): - if not name: - size = len(self.groups) - name = 'group-%d' % size - if not self.groups: - self.groups = [TaskGroup(name)] - return - if not self.groups[0].tasks: - warning('add_group: an empty group is already present') - return - self.groups = self.groups + [TaskGroup(name)] - def add_task(self, task): - if not self.groups: self.add_group('group-0') - task.m_idx = self.idx - self.idx += 1 - self.groups[-1].add_task(task) - def total(self): - total = 0 - if not self.groups: return 0 - for group in self.groups: - total += len(group.tasks) - #for p in group.prio: - # total += len(group.prio[p]) - return total - def debug(self): - for i in self.groups: - print "-----group-------", i.name - for j in i.prio: - print "prio: ", j, str(i.prio[j]) - def add_finished(self, tsk): - self.tasks_done.append(tsk) - # TODO we could install using threads here - if Params.g_install and hasattr(tsk, 'install'): - d = tsk.install - - if type(d) is types.FunctionType: - d(tsk) - elif type(d) is types.StringType: - if not 
tsk.env()[d]: return - lst = [a.relpath_gen(Params.g_build.m_srcnode) for a in tsk.m_outputs] - Common.install_files(tsk.env()[d], '', lst, chmod=0644, env=tsk.env()) - else: - if not d['var']: return - lst = [a.relpath_gen(Params.g_build.m_srcnode) for a in tsk.m_outputs] - if d.get('src', 0): lst += [a.relpath_gen(Params.g_build.m_srcnode) for a in tsk.m_inputs] - # TODO ugly hack - if d.get('as', ''): - Common.install_as(d['var'], d['dir']+d['as'], lst[0], chmod=d.get('chmod', 0644), env=tsk.env()) - else: - Common.install_files(d['var'], d['dir'], lst, chmod=d.get('chmod', 0644), env=tsk.env()) - -class TaskGroup(object): - "A TaskGroup maps priorities (integers) to lists of tasks" - def __init__(self, name): - self.name = name - self.tasks = [] # this list will be consumed - - self.cstr_groups = {} # tasks having equivalent constraints - self.cstr_order = {} # partial order between the cstr groups - self.temp_tasks = [] # tasks put on hold - self.ready = 0 - - def reset(self): - "clears the state of the object (put back the tasks into self.tasks)" - for x in self.cstr_groups: - self.tasks += self.cstr_groups[x] - self.tasks = self.temp_tasks + self.tasks - self.temp_tasks = [] - self.cstr_groups = [] - self.cstr_order = {} - self.ready = 0 - - def prepare(self): - "prepare the scheduling" - self.ready = 1 - self.make_cstr_groups() - self.extract_constraints() - - def get_next_set(self): - "next list of tasks to execute using max job settings, returns (priority, task_list)" - global g_algotype, g_shuffle - if g_algotype == NORMAL: - tasks = self.tasks_in_parallel() - maxj = sys.maxint - elif g_algotype == JOBCONTROL: - (maxj, tasks) = self.tasks_by_max_jobs() - elif g_algotype == MAXPARALLEL: - tasks = self.tasks_with_inner_constraints() - maxj = sys.maxint - else: - Params.fatal("unknown algorithm type %s" % (g_algotype)) - - if not tasks: return () - if g_shuffle: random.shuffle(tasks) - return (maxj, tasks) - - def make_cstr_groups(self): - "unite the tasks that have similar constraints" - self.cstr_groups = {} - for x in self.tasks: - h = x.hash_constraints() - try: self.cstr_groups[h].append(x) - except KeyError: self.cstr_groups[h] = [x] - - def add_task(self, task): - try: self.tasks.append(task) - except KeyError: self.tasks = [task] - - def set_order(self, a, b): - try: self.cstr_order[a].add(b) - except KeyError: self.cstr_order[a] = set([b,]) - - def compare_prios(self, t1, t2): - x = "prio" - p1 = t1.attr(x, None) - p2 = t2.attr(x, None) - - if not p1 is None and not p2 is None: - if p1 < p2: - return 1 - elif p1 > p2: - return -1 - return 0 - - def compare_exts(self, t1, t2): - "extension production" - x = "in_exts" - y = "out_exts" - in_exts = t1.attr(x, ()) - out_exts = t2.attr(y, ()) - for k in in_exts: - if k in out_exts: - return -1 - else: - in_exts = t2.attr(x, ()) - out_exts = t1.attr(y, ()) - for k in in_exts: - if k in out_exts: - return 1 - else: - pass - return 0 - - def compare_partial(self, t1, t2): - "partial relations after/before" - m = "after" - n = "before" - name = t2.__class__.__name__ - if name in t1.attr(m, ()): return -1 - elif name in t1.attr(n, ()): return 1 - name = t1.__class__.__name__ - if name in t2.attr(m, ()): return 1 - elif name in t2.attr(n, ()): return -1 - return 0 - - def extract_constraints(self): - "extract the parallelization constraints from the tasks with different constraints" - keys = self.cstr_groups.keys() - max = len(keys) - a = "m_action" - # hopefully the lenght of this list is short - for i in xrange(max): - t1 = 
self.cstr_groups[keys[i]][0] - for j in xrange(i + 1, max): - t2 = self.cstr_groups[keys[j]][0] - - # add the constraints based on the comparisons - - val = (0 - or self.compare_prios(t1, t2) - or self.compare_exts(t1, t2) - or self.compare_partial(t1, t2) - ) - if val > 0: - self.set_order(keys[i], keys[j]) - continue - elif val < 0: - self.set_order(keys[j], keys[i]) - continue - - #print "the constraint groups are:", self.cstr_groups, "and the constraints ", self.cstr_order - # TODO extract constraints by file extensions on the actions - - def tasks_in_parallel(self): - "(NORMAL) next list of tasks that may be executed in parallel" - - if not self.ready: self.prepare() - - #print [(a.m_name, cstrs[a].m_name) for a in cstrs] - keys = self.cstr_groups.keys() - - unconnected = [] - remainder = [] - - for u in keys: - for k in self.cstr_order.values(): - if u in k: - remainder.append(u) - break - else: - unconnected.append(u) - - #print "unconnected tasks: ", unconnected, "tasks", [eq_groups[x] for x in unconnected] - - toreturn = [] - for y in unconnected: - toreturn.extend(self.cstr_groups[y]) - - # remove stuff only after - for y in unconnected: - try: self.cstr_order.__delitem__(y) - except KeyError: pass - self.cstr_groups.__delitem__(y) - - if not toreturn and remainder: - Params.fatal("circular dependency detected %r" % remainder) - - #print "returning", toreturn - return toreturn - - def tasks_by_max_jobs(self): - "(JOBCONTROL) returns the tasks that can run in parallel with the max amount of jobs" - if not self.ready: self.prepare() - if not self.temp_tasks: self.temp_tasks = self.tasks_in_parallel() - if not self.temp_tasks: return (None, None) - - maxjobs = sys.maxint - ret = [] - remaining = [] - for t in self.temp_tasks: - act = getattr(t, "m_action", None) - m = getattr(act, "maxjobs", getattr(t, "maxjobs", sys.maxint)) - if m > maxjobs: - remaining.append(t) - elif m < maxjobs: - remaining += ret - ret = [t] - maxjobs = m - else: - ret.append(t) - self.temp_tasks = remaining - return (maxjobs, ret) - - def tasks_with_inner_constraints(self): - """(MAXPARALLEL) returns all tasks in this group, but add the constraints on each task instance - as an optimization, it might be desirable to discard the tasks which do not have to run""" - if not self.ready: self.prepare() - - if getattr(self, "done", None): return None - - for p in self.cstr_order: - for v in self.cstr_order[p]: - for m in self.cstr_groups[p]: - for n in self.cstr_groups[v]: - n.set_run_after(m) - self.cstr_order = {} - self.cstr_groups = {} - self.done = 1 - return self.tasks[:] # make a copy - -class TaskBase(object): - "TaskBase is the base class for task objects" - - m_vars = [] - m_color = "GREEN" - maxjobs = sys.maxint - - def __init__(self, normal=1): - self.m_display = '' - self.m_hasrun = 0 - - manager = Params.g_build.task_manager - if normal: - manager.add_task(self) - else: - self.m_idx = manager.idx - manager.idx += 1 - - def attr(self, att, default=None): - return getattr(self, att, getattr(self.__class__, att, default)) - - def hash_constraints(self): - sum = 0 - names = ('prio', 'before', 'after', 'in_exts', 'out_exts') - sum = hash((sum, self.__class__.__name__,)) - for x in names: - sum = hash((sum, self.attr(x, sys.maxint),)) - sum = hash((sum, self.__class__.maxjobs)) - return sum - - def get_str(self): - "string to display to the user" - env = self.env() - src_str = ' '.join([a.nice_path(env) for a in self.m_inputs]) - tgt_str = ' '.join([a.nice_path(env) for a in self.m_outputs]) - return '%s: 
%s -> %s\n' % (self.__class__.__name__, src_str, tgt_str) - - def may_start(self): - "non-zero if the task is ready" - return 1 - def must_run(self): - "0 if the task does not need to run" - return 1 - def prepare(self): - "prepare the task for further processing" - pass - def update_stat(self): - "update the dependency tree (node stats)" - pass - def debug_info(self): - "return debug info" - return '' - def debug(self): - "prints the debug info" - pass - def color(self): - "color to use for the console messages" - return 'BLUE' - def set_display(self, v): - self.m_display = v - def get_display(self): - return self.m_display - -class Task(TaskBase): - "The most common task, it has input and output nodes" - def __init__(self, action_name, env, normal=1, prio=None): - TaskBase.__init__(self, normal=normal) - - # name of the action associated to this task type - if not (prio is None): self.prio = prio - - # environment in use - self.m_env = env - - # inputs and outputs are nodes - # use setters when possible - self.m_inputs = [] - self.m_outputs = [] - - self.m_deps_nodes = [] - self.m_run_after = [] - - # Additionally, you may define the following - #self.dep_vars = 'PREFIX DATADIR' - #self.m_scanner = some_scanner_object - - def env(self): - # TODO IDEA in the future, attach the task generator instead of the env - return self.m_env - - def __repr__(self): - return "".join(['\n\t{task: ', self.__class__.__name__, " ", ",".join([x.m_name for x in self.m_inputs]), " -> ", ",".join([x.m_name for x in self.m_outputs]), '}']) - - def set_inputs(self, inp): - if type(inp) is types.ListType: self.m_inputs += inp - else: self.m_inputs.append(inp) - - def set_outputs(self, out): - if type(out) is types.ListType: self.m_outputs += out - else: self.m_outputs.append(out) - - def set_run_after(self, task): - "set (scheduler) dependency on another task" - # TODO: handle list or object - assert isinstance(task, TaskBase) - self.m_run_after.append(task) - - def get_run_after(self): - try: return self.m_run_after - except AttributeError: return [] - - def add_file_dependency(self, filename): - "TODO user-provided file dependencies" - node = Params.g_build.m_current.find_resource(filename) - self.m_deps_nodes.append(node) - - #------------ users are probably less interested in the following methods --------------# - - def signature(self): - # compute the result one time, and suppose the scanner.get_signature will give the good result - try: return self.sign_all - except AttributeError: pass - - env = self.env() - tree = Params.g_build - - m = md5() - - # TODO maybe we could split this dep sig into two parts (nodes, dependencies) - # this would only help for debugging though - dep_sig = SIG_NIL - scan = getattr(self, 'm_scanner', None) - if scan: - dep_sig = scan.get_signature(self) - try: m.update(dep_sig) - except TypeError: raise Scan.ScannerError, "failure to compute the signature" - else: - # compute the signature from the inputs (no scanner) - for x in self.m_inputs: - v = tree.m_tstamp_variants[x.variant(env)][x.id] - dep_sig = hash( (dep_sig, v) ) - m.update(v) - - # manual dependencies, they can slow down the builds - try: - additional_deps = tree.deps_man - for x in self.m_inputs + self.m_outputs: - try: - d = additional_deps[x] - except KeyError: - continue - if callable(d): d = d() # dependency is a function, call it - dep_sig = hash( (dep_sig, d) ) - m.update(d) - except AttributeError: - pass - - # dependencies on the environment vars - fun = getattr(self.__class__, 'signature_hook', None) - if 
fun: act_sig = self.__class__.signature_hook(self) - else: act_sig = env.sign_vars(self.__class__.m_vars) - m.update(act_sig) - - # additional variable dependencies, if provided - var_sig = None - dep_vars = getattr(self, 'dep_vars', None) - if dep_vars: - var_sig = env.sign_vars(dep_vars) - m.update(var_sig) - - # additional nodes to depend on, if provided - node_sig = SIG_NIL - dep_nodes = getattr(self, 'dep_nodes', []) - for x in dep_nodes: - variant = x.variant(env) - v = tree.m_tstamp_variants[variant][x.id] - node_sig = hash( (node_sig, v) ) - m.update(v) - - # we now have the array of signatures - ret = m.digest() - self.cache_sig = (ret, dep_sig, act_sig, var_sig, node_sig) - - self.sign_all = ret - return ret - - def may_start(self): - "wait for other tasks to complete" - if (not self.m_inputs) or (not self.m_outputs): - if not (not self.m_inputs) and (not self.m_outputs): - error("potentially grave error, task is invalid : no inputs or outputs") - self.debug() - - # the scanner has its word to say - scan = getattr(self, 'm_scanner', None) - if scan: - fun = getattr(scan, 'may_start', None) - if fun: - if not fun(self): - return 0 - - # this is a dependency using the scheduler, as opposed to hash-based ones - for t in self.get_run_after(): - if not t.m_hasrun: - return 0 - return 1 - - def must_run(self): - "see if the task must be run or not" - #return 0 # benchmarking - - env = self.env() - tree = Params.g_build - - # tasks that have no inputs or outputs are run each time - if not self.m_inputs and not self.m_outputs: - self.m_dep_sig = SIG_NIL - return 1 - - # look at the previous signature first - node = self.m_outputs[0] - variant = node.variant(env) - try: - time = tree.m_tstamp_variants[variant][node.id] - except KeyError: - debug("task #%d should run as the first node does not exist" % self.m_idx, 'task') - try: new_sig = self.signature() - except KeyError: - print "TODO - computing the signature failed" - return 1 - - ret = self.can_retrieve_cache(new_sig) - return not ret - - key = hash( (variant, node.m_name, time, getattr(self, 'm_scanner', self).__class__.__name__) ) - try: prev_sig = tree.bld_sigs[key][0] - except KeyError: return 1 - #print "prev_sig is ", prev_sig - new_sig = self.signature() - - # debug if asked to - if Params.g_zones: self.debug_why(tree.bld_sigs[key]) - - if new_sig != prev_sig: - # try to retrieve the file from the cache - ret = self.can_retrieve_cache(new_sig) - return not ret - - return 0 - - def update_stat(self): - "called after a successful task run" - tree = Params.g_build - env = self.env() - sig = self.signature() - - cnt = 0 - for node in self.m_outputs: - variant = node.variant(env) - #if node in tree.m_tstamp_variants[variant]: - # print "variant is ", variant - # print "self sig is ", Params.view_sig(tree.m_tstamp_variants[variant][node]) - - # check if the node exists .. 
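# note: os.stat on the output (next line) raises OSError when a task claimed an output it never produced, so update_stat only records signatures for files that really exist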
- os.stat(node.abspath(env)) - - # important, store the signature for the next run - tree.m_tstamp_variants[variant][node.id] = sig - - # We could re-create the signature of the task with the signature of the outputs - # in practice, this means hashing the output files - # this is unnecessary - if Params.g_cache_global: - ssig = sig.encode('hex') - dest = os.path.join(Params.g_cache_global, ssig+'-'+str(cnt)) - try: shutil.copy2(node.abspath(env), dest) - except IOError: warning('could not write the file to the cache') - cnt += 1 - - # keep the signatures in the first node - node = self.m_outputs[0] - variant = node.variant(env) - time = tree.m_tstamp_variants[variant][node.id] - key = hash( (variant, node.m_name, time, getattr(self, 'm_scanner', self).__class__.__name__) ) - val = self.cache_sig - tree.set_sig_cache(key, val) - - self.m_executed=1 - - def can_retrieve_cache(self, sig): - """Retrieve build nodes from the cache - the file time stamps are updated - for cleaning the least used files from the cache dir - be careful when overriding""" - if not Params.g_cache_global: return None - if Params.g_options.nocache: return None - - env = self.env() - sig = self.signature() - - cnt = 0 - for node in self.m_outputs: - variant = node.variant(env) - - ssig = sig.encode('hex') - orig = os.path.join(Params.g_cache_global, ssig+'-'+str(cnt)) - try: - shutil.copy2(orig, node.abspath(env)) - os.utime(orig, None) - # mark the cache file as used recently (modified) - except (OSError, IOError): - debug("failed retrieving file", 'task') - return None - else: - cnt += 1 - Params.g_build.m_tstamp_variants[variant][node.id] = sig - if not Runner.g_quiet: Params.pprint('GREEN', 'restored from cache %s' % node.bldpath(env)) - return 1 - - def prepare(self): - return - try: self.m_action.prepare(self) - except AttributeError: pass - - def get_display(self): - if self.m_display: return self.m_display - self.m_display = self.get_str() - return self.m_display - - def color(self): - return self.__class__.m_color - - def debug_info(self): - ret = [] - ret.append('-- task details begin --') - ret.append('action: %s' % str(self.m_action)) - ret.append('idx: %s' % str(self.m_idx)) - ret.append('source: %s' % str(self.m_inputs)) - ret.append('target: %s' % str(self.m_outputs)) - ret.append('-- task details end --') - return '\n'.join(ret) - - def debug(self, level=0): - fun = Params.debug - if level>0: fun = Params.error - fun(self.debug_info()) - - def debug_why(self, old_sigs): - "explains why a task is run" - - new_sigs = self.cache_sig - v = Params.view_sig - - debug("Task %s must run: %s" % (self.m_idx, old_sigs[0] != new_sigs[0]), 'task') - if (new_sigs[1] != old_sigs[1]): - debug(' -> A source file (or a dependency) has changed %s %s' % (v(old_sigs[1]), v(new_sigs[1])), 'task') - if (new_sigs[2] != old_sigs[2]): - debug(' -> An environment variable has changed %s %s' % (v(old_sigs[2]), v(new_sigs[2])), 'task') - if (new_sigs[3] != old_sigs[3]): - debug(' -> A manual dependency has changed %s %s' % (v(old_sigs[3]), v(new_sigs[3])), 'task') - if (new_sigs[4] != old_sigs[4]): - debug(' -> A user-given environment variable has changed %s %s' % (v(old_sigs[4]), v(new_sigs[4])), 'task') - -class TaskCmd(TaskBase): - "TaskCmd executes commands. 
Instances always execute their function" - def __init__(self, fun, env): - TaskBase.__init__(self) - self.fun = fun - self.m_env = env - def prepare(self): - self.m_display = "* executing: %s" % self.fun.__name__ - def debug_info(self): - return 'TaskCmd:fun %s' % self.fun.__name__ - def debug(self): - return 'TaskCmd:fun %s' % self.fun.__name__ - def run(self): - self.fun(self) - def env(self): - return self.m_env - -def funex(c): - exec(c) - return f - -reg_act = re.compile(r"(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})", re.M) -def compile_fun(name, line): - """Compiles a string (once) into a function, eg: - simple_action('c++', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].m_parent.bldpath()}') - - The env variables (CXX, ..) on the task must not hold dicts (order) - The reserved keywords TGT and SRC represent the task input and output nodes - """ - extr = [] - def repl(match): - g = match.group - if g('dollar'): return "$" - elif g('subst'): extr.append((g('var'), g('code'))); return "%s" - return None - - line = reg_act.sub(repl, line) - - parm = [] - dvars = [] - app = parm.append - for (var, meth) in extr: - if var == 'SRC': - if meth: app('task.m_inputs%s' % meth) - else: app('" ".join([a.srcpath(env) for a in task.m_inputs])') - elif var == 'TGT': - if meth: app('task.m_outputs%s' % meth) - else: app('" ".join([a.bldpath(env) for a in task.m_outputs])') - else: - if not var in dvars: dvars.append(var) - app("p('%s')" % var) - if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm)) - else: parm = '' - - c = ''' -def f(task): - env = task.env() - p = env.get_flat - try: cmd = "%s" %s - except Exception: task.debug(); raise - return Runner.exec_command(cmd) -''' % (line, parm) - - debug(c, 'action') - return (funex(c), dvars) - -def simple_task_type(name, line, color='GREEN', vars=[], prio=100): - """return a new Task subclass with the function run compiled from the line given""" - (fun, dvars) = compile_fun(name, line) - params = { - 'run': fun, - 'm_vars': vars or dvars, - 'm_color': color, - 'prio': prio, - 'line': line, - 'm_name': name, - } - cls = new.classobj(name, (Task,), params) - setattr(cls, 'm_action', cls) # <- compat - - global g_task_types - g_task_types[name] = cls - - return cls - -def task_type_from_func(name, func, vars=[], color='GREEN', prio=100): - """return a new Task subclass with the function run compiled from the line given""" - params = { - 'run': func, - 'm_vars': vars, - 'm_color': color, - 'prio': prio, - 'm_name': name, - } - cls = new.classobj(name, (Task,), params) - setattr(cls, 'm_action', cls) # <- compat - - global g_task_types - g_task_types[name] = cls - return cls - - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/ar.py a2jmidid-9/wafadmin/Tools/ar.py --- a2jmidid-8~dfsg0/wafadmin/Tools/ar.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/ar.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,75 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2008 (ita) -# Ralf Habacker, 2006 (rh) - -"ar and ranlib" - -import os, sys -import Task -from Configure import conftest - -ar_str = '${AR} ${ARFLAGS} ${TGT} ${SRC} && ${RANLIB} ${RANLIBFLAGS} ${TGT}' - -# FIXME -if sys.platform == "win32": - ar_str = '${AR} s${ARFLAGS} ${TGT} ${SRC}' -Task.simple_task_type('ar_link_static', ar_str, color='YELLOW', prio=111) - -def detect(conf): - comp = conf.find_program('ar', var='AR') - if not comp: return - - ranlib = conf.find_program('ranlib', var='RANLIB') - if not ranlib: return - - v = conf.env - v['AR'] = comp - v['ARFLAGS'] = 'rc' - v['RANLIB'] = ranlib - v['RANLIBFLAGS'] = '' - -def find_ar(conf): - v = conf.env - conf.check_tool('ar') - if not v['AR']: conf.fatal('ar is required for static libraries - not found') - -def find_cpp(conf): - v = conf.env - cpp = None - if v['CPP']: cpp = v['CPP'] - elif 'CPP' in os.environ: cpp = os.environ['CPP'] - if not cpp: cpp = conf.find_program('cpp', var='CPP') - if not cpp: cpp = v['CC'] - if not cpp: cpp = v['CXX'] - v['CPP'] = cpp - -def cc_add_flags(conf): - conf.add_os_flags('CFLAGS', 'CCFLAGS') - conf.add_os_flags('CPPFLAGS') - conf.add_os_flags('LINKFLAGS') - -def cxx_add_flags(conf): - conf.add_os_flags('CXXFLAGS') - conf.add_os_flags('CPPFLAGS') - conf.add_os_flags('LINKFLAGS') - -def cc_load_tools(conf): - conf.check_tool('cc') - conf.check_tool('checks') - -def cxx_load_tools(conf): - conf.check_tool('cxx') - conf.check_tool('checks') - - - -conftest(find_ar) -conftest(find_cpp) -conftest(cc_add_flags) -conftest(cxx_add_flags) -conftest(cc_load_tools) -conftest(cxx_load_tools) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/batched_cc.py a2jmidid-9/wafadmin/Tools/batched_cc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/batched_cc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/batched_cc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,137 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -""" -Batched builds - compile faster -instead of compiling object files one by one, c/c++ compilers are often able to compile at once: -cc -c ../file1.c ../file2.c ../file3.c - -Files are output on the directory where the compiler is called, and dependencies are more difficult -to track (do not run the command on all source files if only one file changes) - -As such, we do as if the files were compiled one by one, but no command is actually run: -replace each cc/cpp Task by a TaskSlave -A new task called TaskMaster collects the signatures from each slave and finds out the command-line -to run. 
- -To set this up, the method ccroot::create_task is replaced by a new version, to enable batched builds -it is only necessary to import this module in the configuration (no other change required) -""" - -EXT_C = ['.c', '.cc', '.cpp', '.cxx'] - -import shutil, os -import TaskGen, Task, ccroot, Params -from TaskGen import extension - -cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRC}' -#Task.simple_task_type('all_cc', cc_str, 'GREEN') - -cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRC}' -#Task.simple_task_type('all_cxx', cxx_str, color='GREEN') - -class TaskMaster(Task.Task): - def __init__(self, action_name, env, priority=92, normal=1, master=None): - Task.Task.__init__(self, action_name, env, prio=priority, normal=normal) - self.slaves=[] - self.m_inputs2=[] - self.m_outputs2=[] - self.act = action_name - - def add_slave(self, slave): - self.slaves.append(slave) - self.set_run_after(slave) - - def may_start(self): - for t in self.m_run_after: - if not t.m_hasrun: return 0 - - for t in self.slaves: - self.m_inputs.append(t.m_inputs[0]) - self.m_outputs.append(t.m_outputs[0]) - if t.m_must_run: - self.m_inputs2.append(t.m_inputs[0]) - self.m_outputs2.append(t.m_outputs[0]) - return 1 - - def run(self): - tmpinputs = self.m_inputs - self.m_inputs = self.m_inputs2 - tmpoutputs = self.m_outputs - self.m_outputs = self.m_outputs2 - - ret = self.__class__.__dict__[self.act](self) - env = self.env() - - rootdir = Params.g_build.m_srcnode.abspath(env) - - # unfortunately building the files in batch mode outputs them in the current folder (the build dir) - # now move the files from the top of the builddir to the correct location - for i in self.m_outputs: - name = i.m_name - if name[-1] == "s": name = name[:-1] # extension for shlib is .os, remove the s - shutil.move(name, i.bldpath(env)) - - self.m_inputs = tmpinputs - self.m_outputs = tmpoutputs - - return ret - - # ouch, vars are ignored - m_vars = [] - (fun, v) = Task.compile_fun("all_cc", cc_str) - all_cc = fun - m_vars += v - (fun, v) = Task.compile_fun("all_cxx", cxx_str) - all_cxx = fun - m_vars += v - -class TaskSlave(Task.Task): - def __init__(self, action_name, env, priority=90, normal=1, master=None): - Task.Task.__init__(self, action_name, env, priority, normal) - self.m_master = master - - def get_display(self): - return "* skipping %s\n" % self.m_inputs[0].m_name - - def update_stat(self): - self.m_executed=1 - - def must_run(self): - self.m_must_run = Task.Task.must_run(self) - return self.m_must_run - - def run(self): - return 0 - - def can_retrieve_cache(self, sig): - return None - -def create_task_cxx_new(self, node): - comp = 'cxx' in self.features and 'cxx' or 'cc' - - try: - mm = self.mastertask - except AttributeError: - mm = TaskMaster("all_"+comp, self.env) - self.mastertask = mm - - task = TaskSlave(comp, self.env, 40, master=mm) - self.m_tasks.append(task) - mm.add_slave(task) - - task.m_scanner = ccroot.g_c_scanner - task.defines = self.scanner_defines - - task.set_inputs(node) - task.set_outputs(node.change_ext('.o')) - - self.compiled_tasks.append(task) - - - -extension(EXT_C)(create_task_cxx_new) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/bison.py a2jmidid-9/wafadmin/Tools/bison.py --- a2jmidid-8~dfsg0/wafadmin/Tools/bison.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/bison.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,33 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# John O'Meara, 2006 - -"Bison processing" - -import TaskGen - -def decide_ext(self, node): - c_ext = '.tab.c' - if node.m_name.endswith('.yc'): c_ext = '.tab.cc' - if '-d' in self.env['BISONFLAGS']: - return [c_ext, c_ext.replace('c', 'h')] - else: - return c_ext - -TaskGen.declare_chain( - name = 'bison', - action = 'cd ${SRC[0].bld_dir(env)} && ${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].m_name}', - ext_in = ['.y', '.yc'], - decider = decide_ext, -) - -def detect(conf): - bison = conf.find_program('bison', var='BISON') - if not bison: conf.fatal("bison was not found") - v = conf.env - v['BISONFLAGS'] = '-d' - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/boost2.py a2jmidid-9/wafadmin/Tools/boost2.py --- a2jmidid-8~dfsg0/wafadmin/Tools/boost2.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/boost2.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,314 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! /usr/bin/env python -# encoding: utf-8 -# -# written by Ruediger Sonderfeld , 2008 -# modified by Bjoern Michaelsen, 2008 -# -# partially based on boost.py written by Gernot Vormayr - -""" -Boost Configurator: - -written by Ruediger Sonderfeld , 2008 -modified by Bjoern Michaelsen, 2008 -partially based on boost.py written by Gernot Vormayr - -Usage: -## wscript -# ... - -def set_options(opt): - opt.tool_options('boost2') - # ... - -def configure(conf): - # ... (e.g. conf.check_tool('g++')) - conf.check_tool('boost2') - - boostconf = conf.create_boost_configurator() - boostconf.lib = ['iostream', 'filesystem'] - # we don't care about other tags, but version has to be explicitly tagged - boostconf.min_score = 1000 - boostconf.tagscores['version'] = (1000,-1000) - # we want a static lib - boostconf.static = boostconf.STATIC_ONLYSTATIC - boostconf.run() - -ISSUES: - * find_includes should be called only once! - -TODO: - * run_cache - * support mandatory - * ... 
- -""" - -import os, os.path, glob, types, re -import Params, Configure, config_c -from Params import fatal, warning -from Configure import conf - -class boost_configurator(config_c.configurator_base): - """ - - min_version - - max_version - - version - - include_path - - lib_path - - lib - - toolsettag - None or a regexp - - threadingtag - None or a regexp - - abitag - None or a regexp - - versiontag - WARNING: you should rather use version or min_version/max_version - - static - look for static libs (values: - 'nostatic' or STATIC_NOSTATIC - ignore static libs (default) - 'both' or STATIC_BOTH - find static libs, too - 'onlystatic' or STATIC_ONLYSTATIC - find only static libs - - tagscores['version'] - - tagscores['abi'] - - tagscores['threading'] - - tagscores['toolset'] - - the tagscores are tuples (match_score, nomatch_score) - match_score is the added to the score if the tag is matched - nomatch_score is added when a tag is found and does not match - - min_score - """ - ## __metaclass__ = config_c.attached_conf ## autohook - STATIC_NOSTATIC = 'nostatic' - STATIC_BOTH = 'both' - STATIC_ONLYSTATIC = 'onlystatic' - def __init__(self, conf): - config_c.configurator_base.__init__(self, conf) - - (self.min_version, self.max_version, self.version) = ('','','') - self.lib_path = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib'] - self.include_path = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include'] - self.lib = '' - (self.threadingtag, self.abitag, self.versiontag, self.toolsettag) = (None, '^[^d]*$', None, None) - self.tagscores = { - 'threading': (10,-10), - 'abi': (10,-10), - 'toolset': (1,-1), - 'version': (100,-100) } - self.min_score = 0 - self.static = boost_configurator.STATIC_NOSTATIC - - self.conf = conf - self.found_includes = 0 - - def run_cache(self, retval): pass # todo - - def validate(self): - if self.version: - self.min_version = self.max_version = self.version - - def get_boost_version_number(self, dir): - test_obj = Configure.check_data() - test_obj.code = ''' -#include -#include -int main() { std::cout << BOOST_VERSION << std::endl; } -''' - test_obj.env = self.conf.env - backup = test_obj.env['CPPPATH'] - test_obj.env['CPPPATH'] = [dir] - test_obj.execute = 1 - test_obj.force_compiler = 'cpp' - ret = self.conf.run_check(test_obj) - test_obj.env['CPPPATH'] = backup - if ret: - return int(ret['result']) - else: - return -1 - - def string_to_version(str): - version = str.split('.') - return int(version[0])*100000 + int(version[1])*100 + int(version[2]) - - def version_string(self, version): - major = version / 100000 - minor = version / 100 % 1000 - minor_minor = version % 100 - if minor_minor == 0: - return "%d_%d" % (major, minor) - else: - return "%d_%d_%d" % (major, minor, minor_minor) - - def find_includes(self): - """ - find_includes checks every path in self.include_path for subdir - that either starts with boost- or is named boost. - - Than the version is checked and selected accordingly to - min_version/max_version. The highest possible version number is - selected! - - If no versiontag is set the versiontag is set accordingly to the - selected library and CPPPATH_BOOST is set. 
- """ - env = self.conf.env - guess = [] - include_paths = [getattr(Params.g_options, 'boostincludes', '')] - if not include_paths[0]: - if self.include_path is types.StringType: - include_paths = [self.include_path] - else: - include_paths = self.include_path - - min_version = 0 - if self.min_version: - min_version = string_to_version(self.min_version) - max_version = 0xFFFFFFFFFFFFFFFF - if self.max_version: - max_version = string_to_version(self.max_version) - - version = 0 - boost_path = '' - for include_path in include_paths: - boost_paths = glob.glob(include_path + '/boost*') - for path in boost_paths: - pathname = path[len(include_path)+1:] - ret = -1 - if pathname == 'boost': - path = include_path - ret = self.get_boost_version_number(path) - elif pathname.startswith('boost-'): - ret = self.get_boost_version_number(path) - - if ret != -1 and ret >= min_version and ret <= max_version and ret > version: - boost_path = path - version = ret - - if version == 0 or len(boost_path) == 0: - fatal('boost headers not found! (required version min: %s max: %s)' - % (self.min_version, self.max_version)) - return 0 - - found_version = self.version_string(version) - versiontag = '^' + found_version + '$' - if self.versiontag is None: - self.versiontag = versiontag - elif self.versiontag != versiontag: - warning('boost header version and versiontag do _not_ match!') - self.conf.check_message('header', 'boost', 1, 'Version ' + found_version + - ' (' + boost_path + ')') - env['CPPPATH_BOOST'] = boost_path - env['BOOST_VERSION'] = found_version - self.found_includes = 1 - - def get_toolset(self): - v = self.conf.env - toolset = v['CXX_NAME'] - if v['CXX_VERSION']: - version_no = v['CXX_VERSION'].split('.') - toolset += version_no[0] - if len(version_no) > 1: - toolset += version_no[1] - return toolset - - def tags_score(self, tags): - """ - checks library tags - - see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1 - - """ - is_versiontag = re.compile('^\d+_\d+_?\d*$') - is_threadingtag = re.compile('^mt$') - is_abitag = re.compile('^[sgydpn]+$') - is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|msvc|qcc|sun|vacpp)\d*$') - score = 0 - needed_tags = { - 'threading': self.threadingtag, - 'abi': self.abitag, - 'toolset': self.toolsettag, - 'version': self.versiontag } - if self.toolsettag is None: - needed_tags['toolset'] = self.get_toolset() - found_tags = {} - for tag in tags: - if is_versiontag.match(tag): found_tags['version'] = tag - if is_threadingtag.match(tag): found_tags['threading'] = tag - if is_abitag.match(tag): found_tags['abi'] = tag - if is_toolsettag.match(tag): found_tags['toolset'] = tag - for tagname in needed_tags.iterkeys(): - if needed_tags[tagname] is not None and found_tags.has_key(tagname): - if re.compile(needed_tags[tagname]).match(found_tags[tagname]): - score += self.tagscores[tagname][0] - else: - score += self.tagscores[tagname][1] - return score - - def libfiles(self, lib, pattern, lib_paths): - result = [] - for lib_path in lib_paths: - libname = pattern % ('boost_' + lib + '*') - result += glob.glob(lib_path + '/' + libname) - return result - - def find_library_from_list(self, lib, files): - lib_pattern = re.compile('.*boost_(.*?)\..*') - result = (None, None) - resultscore = self.min_score-1 - for file in files: - m = lib_pattern.search(file, 1) - if m: - libname = m.group(1) - libtags = libname.split('-')[1:] - currentscore = self.tags_score(libtags) - if currentscore > resultscore: - result = 
(libname, file) - resultscore = currentscore - return result - - def find_library(self, lib): - """ - searches library paths for lib. - """ - lib_paths = [getattr(Params.g_options, 'boostlibs', '')] - if not lib_paths[0]: - if self.lib_path is types.StringType: - lib_paths = [self.lib_path] - else: - lib_paths = self.lib_path - (libname, file) = (None, None) - if self.static in [boost_configurator.STATIC_NOSTATIC, boost_configurator.STATIC_BOTH]: - st_env_prefix = 'LIB' - files = self.libfiles(lib, self.conf.env['shlib_PATTERN'], lib_paths) - (libname, file) = self.find_library_from_list(lib, files) - if libname is None and self.static in [boost_configurator.STATIC_ONLYSTATIC, boost_configurator.STATIC_BOTH]: - st_env_prefix = 'STATICLIB' - files = self.libfiles(lib, self.conf.env['staticlib_PATTERN'], lib_paths) - (libname, file) = self.find_library_from_list(lib, files) - if libname is not None: - self.conf.check_message('library', 'boost_'+lib, 1, file) - self.conf.env['LIBPATH_BOOST_' + lib.upper()] = os.path.split(file)[0] - self.conf.env[st_env_prefix + '_BOOST_' + lib.upper()] = 'boost_'+libname - return - fatal('lib boost_' + lib + ' not found!') - - def find_libraries(self): - libs_to_find = self.lib - if self.lib is types.StringType: libs_to_find = [self.lib] - for lib in libs_to_find: - self.find_library(lib) - - def run_test(self): - if not self.found_includes: - self.find_includes() - self.find_libraries() - -def detect(conf): - def create_boost_configurator(self): - return boost_configurator(conf) - conf.hook(create_boost_configurator) - -def set_options(opt): - opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35') - opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib') - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/boost.py a2jmidid-9/wafadmin/Tools/boost.py --- a2jmidid-8~dfsg0/wafadmin/Tools/boost.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/boost.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,248 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Gernot Vormayr, 2008 - -# changes by Ruediger Sonderfeld , 2008 - - -print """WARNING: You are using the deprecated boost tool! - This tool will be removed in the next major version. - Please consider converting your wscripts to use boost2. 
- """ - - - - -""" -Quick n dirty boost detections -""" - -import os, glob, types -import Params, Configure -from Params import fatal - -def detect_boost(conf): - env = conf.env - opt = Params.g_options - - want_asio = 0 - - if env['WANT_BOOST']: - if type(env['WANT_BOOST']) is types.StringType: - want_libs = env['WANT_BOOST'].split() - else: - want_libs = env['WANT_BOOST'] - if want_libs.count('ASIO'): - want_libs.remove('ASIO') - want_asio=1 - if want_libs.count('ASIO_MT'): - want_libs.remove('ASIO_MT') - want_asio=2 - else: - want_libs = 0 - - boostlibs = getattr(opt, 'boostlibs', '') - boostincludes = getattr(opt, 'boostincludes', '') - asioincludes = getattr(opt, 'asioincludes', '') - boostfolder = getattr(opt, 'boostfolder', '') - - if boostfolder: - boostincludes=boostfolder+'/include' - boostlibs=boostfolder+'/lib' - - #let's try to find boost which is not easy, cause boost seems like it wants to hide :( - if not boostincludes: - boostincludes= ['/sw/include', '/usr/local/include', '/opt/include', '/opt/local/include', '/usr/include'] - else: - boostincludes=[boostincludes] - guess=[] - for dir in boostincludes: - try: - for subdir in os.listdir(dir): - # we have to check for boost or boost-version cause there are systems - # which put boost directly into a boost subdir (eg. gentoo) - if subdir=='boost': guess.append(dir) - elif subdir.startswith('boost-'): guess.append(dir+'/'+subdir) - except OSError: pass - if not guess: - fatal('boost headers not found') - return 0 - versions={} - for dir in guess: - test_obj = Configure.check_data() - test_obj.code = '#include \n#include \nint main() { std::cout << BOOST_VERSION << std::endl; return 0; }\n' - test_obj.env = env - test_obj.env['CPPPATH']=[dir] - test_obj.execute = 1 - test_obj.force_compiler='cpp' - ret=conf.run_check(test_obj) - if ret: - versions[int(ret['result'])]=dir - version=versions.keys() - - errtext='' - - if env['WANT_BOOST_MIN']: - errtext+='>= '+env['WANT_BOOST_MIN']+' ' - min_version=env['WANT_BOOST_MIN'].split('.') - min_version=int(min_version[0])*100000+int(min_version[1])*100+int(min_version[2]) - version=filter(lambda x:x>=min_version,version) - if env['WANT_BOOST_MAX']: - errtext+='<= '+env['WANT_BOOST_MAX']+' ' - max_version=env['WANT_BOOST_MAX'].split('.') - max_version=int(max_version[0])*100000+int(max_version[1])*100+int(max_version[2]) - version=filter(lambda x:x<=max_version,version) - - version.sort() - if len(version) is 0: - fatal('No boost '+errtext+'found!') - - version=version.pop() - boost_includes=versions[version] - if version % 100 == 0: - boost_version="%d_%d" % (version/100000, version/100%1000) - else: - boost_version="%d_%d_%d" % (version/100000, version/100%1000, - version%100) - version="%d.%d.%d" % (version/100000,version/100%1000,version%100) - conf.check_message('header','boost/version.hpp',1,'Version '+boost_includes+' ('+version+')') - env['CPPPATH_BOOST']=boost_includes - - # search vor asio - if want_asio: - errtext='' - asio_version=min_version=max_version=0 - if env['WANT_ASIO_MIN']: - errtext+='>= '+env['WANT_ASIO_MIN']+' ' - min_version=env['WANT_ASIO_MIN'].split('.') - min_version=int(min_version[0])*100000+int(min_version[1])*100+int(min_version[2]) - if env['WANT_ASIO_MAX']: - errtext+='<= '+env['WANT_ASIO_MAX']+' ' - max_version=env['WANT_ASIO_MAX'].split('.') - max_version=int(max_version[0])*100000+int(max_version[1])*100+int(max_version[2]) - #first look in the boost dir - but not when asioincludes is set - if not asioincludes: - test_obj = Configure.check_data() - 
test_obj.code = '#include <iostream>\n#include <boost/asio/version.hpp>\nint main() { std::cout << BOOST_ASIO_VERSION << std::endl; return 0; }\n' - test_obj.env = env - test_obj.env['CPPPATH']=[boost_includes] - test_obj.execute = 1 - test_obj.force_compiler='cpp' - ret=conf.run_check(test_obj) - if ret: - asio_version=int(ret['result']) - if min_version and asio_version<min_version: - asio_version=0 - if max_version and asio_version>max_version: - asio_version=0 - if asio_version: - conf.define('BOOST_ASIO',1) - version="%d.%d.%d" % (asio_version/100000,asio_version/100%1000,asio_version%100) - conf.check_message('header','boost/asio/version.hpp',1,'Version '+version) - if want_asio==1: - if want_libs: - try: want_libs.remove('BOOST_SYSTEM') - except ValueError: pass - want_libs.append('BOOST_SYSTEM') - else: - want_libs=['BOOST_SYSTEM'] - else: - if want_libs: - try: want_libs.remove('BOOST_SYSTEM_MT') - except ValueError: pass - want_libs.append('BOOST_SYSTEM_MT') - else: - want_libs=['BOOST_SYSTEM_MT'] - #ok not in boost dir - ahh did i say ok? na imho that's not ok! - if not asio_version: - if not asioincludes: - asioincludes= ['/sw/include', '/usr/local/include', '/opt/include', '/opt/local/include', '/usr/include'] - else: - asioincludes=[asioincludes] - versions={} - for dir in asioincludes: - test_obj = Configure.check_data() - test_obj.code = '#include <iostream>\n#include <asio/version.hpp>\nint main() { std::cout << ASIO_VERSION << std::endl; return 0; }\n' - test_obj.env = env - test_obj.env['CPPPATH']=[dir] - test_obj.execute = 1 - test_obj.force_compiler='cpp' - ret=conf.run_check(test_obj) - if ret: - versions[int(ret['result'])]=dir - version=versions.keys() - if min_version: - version=filter(lambda x:x>=min_version,version) - if max_version: - version=filter(lambda x:x<=max_version,version) - - version.sort() - if len(version) is 0: - fatal('No asio '+errtext+'found!') - - version=version.pop() - asio_includes=versions[version] - version="%d.%d.%d" % (version/100000,version/100%1000,version%100) - conf.check_message('header','asio/version.hpp',1,'Version '+asio_includes+' ('+version+')') - env['CPPPATH_ASIO']=asio_includes - env['CPPPATH_ASIO_MT']=asio_includes - conf.undefine('BOOST_ASIO') - #well now we've found our includes - let's search for the precompiled libs - if want_libs: - def check_boost_libs(libs,lib_path): - ext = env['shlib_PATTERN'].split('%s')[1] - files=glob.glob(lib_path+'/libboost_*'+ext) - files=map(lambda x:x[len(lib_path)+4:-len(ext)] ,filter(lambda x: x.find('-d')==-1 ,files)) - for lib in libs: - libname=lib.lower() - use_single_threaded = 0 - if libname.endswith('_mt'): - libname=libname[0:-3]+'-mt' - elif libname.endswith('_st'): - libname=libname[0:-3] - use_single_threaded = 1 - for file in files: - if file.startswith(libname) and file.endswith(boost_version): - if use_single_threaded and file.find('-mt') != -1: - continue - conf.check_message('library',libname,1,file) - env['LIBPATH_'+lib]=lib_path - env['LIB_'+lib]=file - if lib is 'BOOST_SYSTEM': - env['LIB_ASIO']=file - env['LIBPATH_ASIO']=file - elif lib is 'BOOST_SYSTEM_MT': - env['LIB_ASIO_MT']=file - env['LIBPATH_ASIO_MT']=file - break - else: - fatal('lib '+libname+' not found!') - - if not boostlibs: - boostlibs=['/usr/lib64', '/usr/lib32', '/usr/lib', '/sw/lib', '/usr/local/lib', '/opt/lib', '/opt/local/lib'] - else: - boostlibs=[boostlibs] - - lib_path=Configure.find_file_ext('libboost_*'+version+'*',boostlibs) - if lib_path=='': - lib_path=Configure.find_file_ext('libboost_*',boostlibs) - if lib_path=='': - conf.check_message('library','boost',0,'') - else: - check_boost_libs(want_libs,lib_path) - else: 
- check_boost_libs(want_libs,lib_path) - return 1 - -def detect(conf): - return detect_boost(conf) - -def set_options(opt): - opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_34_1') - opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib') - opt.add_option('--boost', type='string', default='', dest='boostfolder', help='path to the directory where the boost lives are e.g. /usr/local') - opt.add_option('--asio-includes', type='string', default='', dest='asioincludes', help='path to asio e.g. /usr/local/include/asio') - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/cc.py a2jmidid-9/wafadmin/Tools/cc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/cc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/cc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,111 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -"Base for c programs/libraries" - -import sys -import TaskGen, Params, Utils, Task -from Params import debug -import ccroot # <- do not remove -from TaskGen import taskgen, before, extension - -g_cc_flag_vars = [ -'FRAMEWORK', 'FRAMEWORKPATH', -'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH', -'INCLUDE', -'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES'] - -EXT_CC = ['.c'] -CC_METHS = ['init_cc', 'apply_type_vars', 'apply_incpaths', 'apply_defines_cc', -'apply_core', 'apply_lib_vars', 'apply_obj_vars_cc'] - -TaskGen.add_feature('cc', CC_METHS) - -g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS'] - -class cc_taskgen(ccroot.ccroot_abstract): - def __init__(self, *kw): - ccroot.ccroot_abstract.__init__(self, *kw) - -def init_cc(self): - if hasattr(self, 'p_flag_vars'): self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars) - else: self.p_flag_vars = g_cc_flag_vars - - if hasattr(self, 'p_type_vars'): self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars) - else: self.p_type_vars = g_cc_type_vars - -def apply_obj_vars_cc(self): - debug('apply_obj_vars_cc', 'ccroot') - env = self.env - app = env.append_unique - cpppath_st = env['CPPPATH_ST'] - - # local flags come first - # set the user-defined includes paths - for i in env['INC_PATHS']: - app('_CCINCFLAGS', cpppath_st % i.bldpath(env)) - app('_CCINCFLAGS', cpppath_st % i.srcpath(env)) - - # set the library include paths - for i in env['CPPPATH']: - app('_CCINCFLAGS', cpppath_st % i) - - # this is usually a good idea - app('_CCINCFLAGS', cpppath_st % '.') - app('_CCINCFLAGS', cpppath_st % env.variant()) - tmpnode = self.path - app('_CCINCFLAGS', cpppath_st % tmpnode.bldpath(env)) - app('_CCINCFLAGS', cpppath_st % tmpnode.srcpath(env)) - -def apply_defines_cc(self): - tree = Params.g_build - self.defines = getattr(self, 'defines', []) - lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES']) - milst = [] - - # now process the local defines - for defi in lst: - if not defi in milst: - milst.append(defi) - - # CCDEFINES_ - libs = self.to_list(self.uselib) - for l in libs: - val = self.env['CCDEFINES_'+l] - if val: milst += val - self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]] - y = self.env['CCDEFINES_ST'] - self.env['_CCDEFFLAGS'] = [y%x for x in milst] - -def c_hook(self, node): - # create the 
compilation task: cpp or cc - task = self.create_task('cc', self.env) - try: obj_ext = self.obj_ext - except AttributeError: obj_ext = '_%d.o' % self.idx - - task.m_scanner = ccroot.g_c_scanner - task.defines = self.scanner_defines - - task.m_inputs = [node] - task.m_outputs = [node.change_ext(obj_ext)] - self.compiled_tasks.append(task) - -cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}' -link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT} ${LINKFLAGS} ${_LIBDIRFLAGS} ${_LIBFLAGS}' - -Task.simple_task_type('cc', cc_str, 'GREEN', prio=100) -Task.simple_task_type('cc_link', link_str, color='YELLOW', prio=111) - -TaskGen.declare_order('apply_incpaths', 'apply_defines_cc', 'apply_core', 'apply_lib_vars', 'apply_obj_vars_cc', 'apply_obj_vars') - - -taskgen(init_cc) -before('apply_type_vars')(init_cc) -taskgen(apply_obj_vars_cc) -taskgen(apply_defines_cc) -extension(EXT_CC)(c_hook) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/ccroot.py a2jmidid-9/wafadmin/Tools/ccroot.py --- a2jmidid-8~dfsg0/wafadmin/Tools/ccroot.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/ccroot.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,490 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -"base for all c/c++ programs and libraries" - -import os, sys, re -import TaskGen, Params, Scan, Common, Utils, Node, preproc -from Params import error, debug, fatal, warning -from Utils import md5 -from TaskGen import taskgen, after, before, feature -from Constants import * - -import config_c # <- necessary for the configuration, do not touch - -get_version_re = re.compile('\d+\.\d+(\.?\d+)*') -def get_cc_version(conf, cc, version_var): - v = conf.env - output = os.popen('%s -dumpversion' % cc).read() - if output: - match = get_version_re.search(output) - if match: - v[version_var] = match.group(0) - conf.check_message('compiler', 'version', 1, v[version_var]) - return v[version_var] - Params.warning('could not determine the compiler version') - -class DEBUG_LEVELS: - ULTRADEBUG = "ultradebug" - DEBUG = "debug" - RELEASE = "release" - OPTIMIZED = "optimized" - CUSTOM = "custom" - - ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM] - -class c_scanner(Scan.scanner): - "scanner for c/c++ files" - def __init__(self): - Scan.scanner.__init__(self) - self.vars = ('CCDEFINES', 'CXXDEFINES') - - def scan(self, task, node): - "look for .h the .cpp need" - debug("_scan_preprocessor(self, node, env, path_lst)", 'ccroot') - gruik = preproc.c_parser(nodepaths = task.env()['INC_PATHS'], defines = task.defines) - gruik.start(node, task.env()) - if Params.g_verbose: - debug("nodes found for %s: %s %s" % (str(node), str(gruik.m_nodes), str(gruik.m_names)), 'deps') - debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps') - seen = [] - all = [] - for x in gruik.m_nodes: - if id(x) in seen: continue - seen.append(id(x)) - all.append(x) - return (all, gruik.m_names) - - def get_signature_queue(self, tsk): - """compute signatures from .cpp and inferred .h files - there is a single list (no tree traversal) - hot spot so do not touch""" - m = md5() - upd = m.update - - # additional variables to hash (command-line defines for example) - env = tsk.env() - for x in self.vars: - k = env[x] - if k: upd(str(k)) - - tree = Params.g_build - rescan = tree.rescan - tstamp = tree.m_tstamp_variants - - # headers to hash - try: - idx = tsk.m_inputs[0].id - variant = 
tsk.m_inputs[0].variant(env) - upd(tstamp[variant][idx]) - - for k in Params.g_build.node_deps[variant][idx]: - - # unlikely but necessary if it happens - try: tree.m_scanned_folders[k.m_parent.id] - except KeyError: rescan(k.m_parent) - - if k.id & 3 == Node.FILE: upd(tstamp[0][k.id]) - else: upd(tstamp[env.variant()][k.id]) - - except KeyError: - return None - - return m.digest() - -g_c_scanner = c_scanner() -"scanner for c programs" - -class ccroot_abstract(TaskGen.task_gen): - "Parent class for programs and libraries in languages c, c++ and moc (Qt)" - def __init__(self, *k, **kw): - TaskGen.task_gen.__init__(self, *k) - - # TODO m_type is obsolete - if len(k)>1: self.m_type = k[1] - else: self.m_type = '' - if self.m_type: - self.features.append('c' + self.m_type) - - # includes, seen from the current directory - self.includes='' - - self.defines = '' - self.rpaths = '' - - self.uselib = '' - - # new scheme: provide the names of the local libraries to link with - # the objects found will be post()-ed - self.uselib_local='' - - # add .o files produced by another task_gen class (task generator names) - self.add_objects = '' - - # version number for shared libraries - #self.vnum='1.2.3' # - #self.soname='.so.3' # else soname is computed from vnum - - #self.program_chmod = 0755 # by default: 0755 - - # do not forget to set the following variables in a subclass - self.p_flag_vars = [] - self.p_type_vars = [] - - #self.link = '' # optional: kind of link to apply (ar, cc, cxx, ..) - - self.scanner_defines = {} - - self.compiled_tasks = [] - self.link_task = None - - # characteristics of what we want to build: cc, cpp, program, staticlib, shlib, etc - #self.features = ['program'] - - def clone(self, env): - new_obj = TaskGen.task_gen.clone(self, env) - variant = '_' + self.env.variant() - - if self.name: new_obj.name = self.name + variant - else: new_obj.name = self.target + variant - new_obj.uselib_local = [x + variant for x in Utils.to_list(self.uselib_local) ] - return new_obj - -def get_target_name(self): - name = self.target - pattern = self.env[self.m_type+'_PATTERN'] - if not pattern: pattern = '%s' - - # name can be src/mylib - k = name.rfind('/') - return name[0:k+1] + pattern % name[k+1:] - -def apply_verif(self): - if not 'objects' in self.features: - if not self.source: - fatal('no source files specified for %s' % self) - if not self.target: - fatal('no target for %s' % self) - -def install_shlib(task): - nums = task.vnum.split('.') - - inst_var = task.inst_var - inst_dir = task.inst_dir - - libname = task.m_outputs[0].m_name - - name3 = libname+'.'+task.vnum - name2 = libname+'.'+nums[0] - name1 = libname - - filename = task.m_outputs[0].abspath(task.env()) - Common.install_as(inst_var, os.path.join(inst_dir, name3), filename, env=task.env()) - Common.symlink_as(inst_var, name3, os.path.join(inst_dir, name2)) - Common.symlink_as(inst_var, name3, os.path.join(inst_dir, name1)) - -# TODO reference the d programs, shlibs in d.py, not here - -def vars_target_cprogram(self): - self.inst_var_default = 'PREFIX' - self.inst_dir_default = 'bin' - -def vars_target_cstaticlib(self): - self.inst_var_default = 'PREFIX' - self.inst_dir_default = 'lib' - -def vars_target_cshlib(self): - self.inst_var_default = 'PREFIX' - self.inst_dir_default = 'lib' - -def install_target_cprogram(self): - if not Params.g_install: return - try: mode = self.program_chmod - except AttributeError: mode = 0755 - self.link_task.install = {'var':self.inst_var,'dir':self.inst_dir,'chmod':mode} - -def 
install_target_cstaticlib(self): - if not Params.g_install: return - self.link_task.install = {'var':self.inst_var,'dir':self.inst_dir} - -def install_target_cshlib(self): - if not Params.g_install: return - if getattr(self, 'vnum', '') and sys.platform != 'win32': - tsk = self.link_task - tsk.vnum = getattr(self, 'vnum', '') - tsk.inst_var = self.inst_var - tsk.inst_dir = self.inst_dir - tsk.install = install_shlib - else: - self.link_task.install = {'var':self.inst_var,'dir':self.inst_dir} - -def apply_incpaths(self): - lst = [] - for i in self.to_list(self.uselib): - if self.env['CPPPATH_'+i]: - lst += self.to_list(self.env['CPPPATH_'+i]) - self.includes = getattr(self, 'includes', []) - inc_lst = self.to_list(self.includes) + lst - if preproc.go_absolute: - inc_lst.extend(preproc.standard_includes) - lst = [] - - tree = Params.g_build - for dir in inc_lst: - node = 0 - if os.path.isabs(dir): - if preproc.go_absolute: - node = Params.g_build.m_root.find_dir(dir) - else: - node = self.path.find_dir(dir) - - if node is None: - error("node not found in ccroot:apply_incpaths "+str(dir)) - elif node: - if not node in lst: lst.append(node) - Params.g_build.rescan(node) - self.env['INC_PATHS'] = self.env['INC_PATHS'] + lst - # now the nodes are added to self.incpaths_lst - -def apply_type_vars(self): - # if the type defines uselib to add, add them - st = self.env[self.m_type+'_USELIB'] - if st: self.uselib = self.uselib + ' ' + st - - # each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc - # so when we make a cppobj of the type shlib, CXXFLAGS are modified accordingly - for var in self.p_type_vars: - compvar = '_'.join([self.m_type, var]) - #print compvar - value = self.env[compvar] - if value: self.env.append_value(var, value) - -def apply_link(self): - # use a custom linker if specified (self.link) - link = getattr(self, 'link', None) - if not link: - if 'cstaticlib' in self.features: link = 'ar_link_static' - elif 'cxx' in self.features: link = 'cxx_link' - else: link = 'cc_link' - linktask = self.create_task(link, self.env) - outputs = [t.m_outputs[0] for t in self.compiled_tasks] - linktask.set_inputs(outputs) - linktask.set_outputs(self.path.find_or_declare(get_target_name(self))) - - self.link_task = linktask - -def apply_lib_vars(self): - env = self.env - - # 1. the case of the libs defined in the project (visit ancestors first) - # the ancestors external libraries (uselib) will be prepended - uselib = self.to_list(self.uselib) - seen = [] - names = [] + self.to_list(self.uselib_local) # consume a copy of the list of names - while names: - x = names.pop(0) - # visit dependencies only once - if x in seen: - continue - - # object does not exist ? 
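# [editor's annotation, not part of the original file] This loop in
# apply_lib_vars resolves 'uselib_local' names breadth-first: the lookup just
# below maps a name to its task generator, the generator's own uselib_local
# entries are queued, it is post()-ed exactly once, its target is recorded in
# LIB or STATICLIB, its build directory is prepended to LIBPATH, and its
# external uselib flags are prepended so that ancestor flags come first on
# the link line.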
- y = TaskGen.name_to_obj(x) - if not y: - fatal("object '%s' was not found in uselib_local (required by '%s')" % (x, self.name)) - - # object has ancestors to process: add them to the end of the list - if y.uselib_local: - lst = y.to_list(y.uselib_local) - for u in lst: - if not u in seen: - names.append(u) - - # safe to process the current object - if not y.m_posted: y.post() - seen.append(x) - - if 'cshlib' in y.features: - env.append_value('LIB', y.target) - elif 'cstaticlib' in y.features: - env.append_value('STATICLIB', y.target) - - # add the link path too - tmp_path = y.path.bldpath(self.env) - if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path) - - # set the dependency over the link task - if y.link_task is not None: - self.link_task.set_run_after(y.link_task) - dep_nodes = getattr(self.link_task, 'dep_nodes', []) - self.link_task.dep_nodes = dep_nodes + y.link_task.m_outputs - - # add ancestors uselib too - morelibs = y.to_list(y.uselib) - for v in morelibs: - if v in uselib: continue - uselib = [v]+uselib - - # if the library task generator provides 'export_incdirs', add to the include path - # if no one uses this feature, it will be removed - if getattr(y, 'export_incdirs', None): - cpppath_st = self.env['CPPPATH_ST'] - app = self.env.append_unique - for x in self.to_list(y.export_incdirs): - node = y.path.find_dir(x) - if not node: fatal('object %s: invalid folder %s in export_incdirs' % (y.target, x)) - if not node in self.env['INC_PATHS']: self.env['INC_PATHS'].append(node) - - # 2. the case of the libs defined outside - for x in uselib: - for v in self.p_flag_vars: - val = self.env[v+'_'+x] - if val: self.env.append_value(v, val) - -def apply_objdeps(self): - "add the .o files produced by some other object files in the same manner as uselib_local" - seen = [] - names = self.to_list(self.add_objects) - while names: - x = names[0] - - # visit dependencies only once - if x in seen: - names = names[1:] - continue - - # object does not exist ? - y = TaskGen.name_to_obj(x) - if not y: - error('object not found in add_objects: obj %s add_objects %s' % (self.name, x)) - names = names[1:] - continue - - # object has ancestors to process first ? update the list of names - if y.add_objects: - added = 0 - lst = y.to_list(y.add_objects) - lst.reverse() - for u in lst: - if u in seen: continue - added = 1 - names = [u]+names - if added: continue # list of names modified, loop - - # safe to process the current object - if not y.m_posted: y.post() - seen.append(x) - - self.link_task.m_inputs += y.out_nodes - -def apply_obj_vars(self): - lib_st = self.env['LIB_ST'] - staticlib_st = self.env['STATICLIB_ST'] - libpath_st = self.env['LIBPATH_ST'] - staticlibpath_st = self.env['STATICLIBPATH_ST'] - - app = self.env.append_unique - - if self.env['FULLSTATIC']: - self.env.append_value('LINKFLAGS', self.env['FULLSTATIC_MARKER']) - - for i in self.env['RPATH']: - app('LINKFLAGS', i) - - for i in self.env['LIBPATH']: - app('LINKFLAGS', libpath_st % i) - - for i in self.env['LIBPATH']: - app('LINKFLAGS', staticlibpath_st % i) - - if self.env['STATICLIB']: - self.env.append_value('LINKFLAGS', self.env['STATICLIB_MARKER']) - k = [(staticlib_st % i) for i in self.env['STATICLIB']] - app('LINKFLAGS', k) - - # fully static binaries ? 
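# [editor's annotation, not part of the original file] STATICLIB_MARKER and
# SHLIB_MARKER above act as linker mode toggles (on GNU toolchains typically
# '-Wl,-Bstatic' and '-Wl,-Bdynamic'; that is an assumption, the actual values
# are set per compiler elsewhere). The branch below switches back to dynamic
# linking before the plain '-l' flags whenever the link is not fully static.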
- if not self.env['FULLSTATIC']: - if self.env['STATICLIB'] or self.env['LIB']: - self.env.append_value('LINKFLAGS', self.env['SHLIB_MARKER']) - - app('LINKFLAGS', [lib_st % i for i in self.env['LIB']]) - -def apply_vnum(self): - "use self.vnum and self.soname to modify the command line (un*x)" - try: vnum = self.vnum - except AttributeError: return - # this is very unix-specific - if sys.platform != 'darwin' and sys.platform != 'win32': - nums = self.vnum.split('.') - try: name3 = self.soname - except AttributeError: name3 = self.link_task.m_outputs[0].m_name+'.'+self.vnum.split('.')[0] - self.env.append_value('LINKFLAGS', '-Wl,-h,'+name3) - -def process_obj_files(self): - if not hasattr(self, 'obj_files'): return - for x in self.obj_files: - node = self.path.find_resource(x) - self.link_task.m_inputs.append(node) - -def add_obj_file(self, file): - """Small example on how to link object files as if they were source - obj = bld.create_obj('cc') - obj.add_obj_file('foo.o')""" - if not hasattr(self, 'obj_files'): self.obj_files = [] - if not 'process_obj_files' in self.meths: self.meths.add('process_obj_files') - self.obj_files.append(file) - -def make_objects_available(self): - """when we do not link; make the .o files available - if we are only building .o files, tell which ones we built""" - self.out_nodes = [] - app = self.out_nodes.append - for t in self.compiled_tasks: app(t.m_outputs[0]) - - -taskgen(apply_verif) -taskgen(vars_target_cprogram) -feature('cprogram', 'dprogram')(vars_target_cprogram) -before('apply_core')(vars_target_cprogram) -taskgen(vars_target_cstaticlib) -feature('cstaticlib', 'dstaticlib')(vars_target_cstaticlib) -before('apply_core')(vars_target_cstaticlib) -taskgen(vars_target_cshlib) -feature('cshlib', 'dshlib')(vars_target_cshlib) -before('apply_core')(vars_target_cshlib) -taskgen(install_target_cprogram) -feature('cprogram', 'dprogram')(install_target_cprogram) -after('apply_objdeps')(install_target_cprogram) -taskgen(install_target_cstaticlib) -feature('cstaticlib', 'dstaticlib')(install_target_cstaticlib) -after('apply_objdeps')(install_target_cstaticlib) -taskgen(install_target_cshlib) -feature('cshlib', 'dshlib')(install_target_cshlib) -after('apply_objdeps')(install_target_cshlib) -taskgen(apply_incpaths) -after('apply_type_vars')(apply_incpaths) -taskgen(apply_type_vars) -taskgen(apply_link) -feature('cprogram', 'cshlib', 'cstaticlib')(apply_link) -after('apply_core')(apply_link) -taskgen(apply_lib_vars) -after('apply_vnum')(apply_lib_vars) -taskgen(apply_objdeps) -feature('cprogram', 'cshlib', 'cstaticlib')(apply_objdeps) -after('apply_obj_vars')(apply_objdeps) -after('apply_vnum')(apply_objdeps) -taskgen(apply_obj_vars) -feature('cprogram', 'cshlib', 'cstaticlib')(apply_obj_vars) -after('apply_lib_vars')(apply_obj_vars) -taskgen(apply_vnum) -feature('cprogram', 'cshlib', 'cstaticlib')(apply_vnum) -after('apply_link')(apply_vnum) -taskgen(process_obj_files) -after('apply_link')(process_obj_files) -taskgen(add_obj_file) -taskgen(make_objects_available) -feature('objects')(make_objects_available) -after('apply_core')(make_objects_available) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/checks.py a2jmidid-9/wafadmin/Tools/checks.py --- a2jmidid-8~dfsg0/wafadmin/Tools/checks.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/checks.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,278 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -""" -Additional configuration checks hooked on the configuration class -we use the decorator notation: @conf -to attach the functions as methods on the Configure class (the conf object) -""" - -import Utils, Configure, config_c -from Configure import conf -from Params import error, fatal - -endian_str = ''' -#include <stdio.h> -int is_big_endian() -{ - long one = 1; - return !(*((char *)(&one))); -} -int main() -{ - if (is_big_endian()) printf("bigendian=1\n"); - else printf("bigendian=0\n"); - return 0; -} -''' - -class compile_configurator(config_c.configurator_base): - "inheritance demo" - def __init__(self, conf): - config_c.configurator_base.__init__(self, conf) - self.name = '' - self.code = '' - self.flags = '' - self.define = '' - self.uselib = '' - self.want_message = 0 - self.msg = '' - self.force_compiler = None - - def error(self): - fatal('test program would not run') - - def run_cache(self, retval): - if self.want_message: - self.conf.check_message('compile code (cached)', '', not (retval is False), option=self.msg) - - def validate(self): - if not self.code: - fatal('test configurator needs code to compile and run!') - - def run_test(self): - obj = config_c.check_data() - obj.code = self.code - obj.env = self.env - obj.uselib = self.uselib - obj.flags = self.flags - if self.force_compiler: obj.force_compiler = self.force_compiler - ret = self.conf.run_check(obj) - - if self.want_message: - self.conf.check_message('compile code', '', not (ret is False), option=self.msg) - - return ret - -def create_compile_configurator(self): - return compile_configurator(self) - -def checkEndian(self, define='', pathlst=[]): - """the point of checkEndian is to make an example, the following is better - if sys.byteorder == "little":""" - - if define == '': define = 'IS_BIGENDIAN' - - if self.is_defined(define): return self.get_define(define) - - global endian - - test = self.create_test_configurator() - test.code = endian_str - code = test.run()['result'] - - t = Utils.to_hashtable(code) - try: - is_big = int(t['bigendian']) - except KeyError: - raise Configure.ConfigurationError('endian test failed '+code) - - if is_big: strbig = 'big endian' - else: strbig = 'little endian' - self.check_message_custom('endianness', '', strbig) - - self.define_cond(define, is_big) - return is_big - -features_str = ''' -#include <stdio.h> -int is_big_endian() -{ - long one = 1; - return !(*((char *)(&one))); -} -int main() -{ - if (is_big_endian()) printf("bigendian=1\n"); - else printf("bigendian=0\n"); - printf("int_size=%d\n", sizeof(int)); - printf("long_int_size=%d\n", sizeof(long int)); - printf("long_long_int_size=%d\n", sizeof(long long int)); - printf("double_size=%d\n", sizeof(double)); - return 0; -} -''' - -def checkFeatures(self, lst=[], pathlst=[]): - - global endian - - test = self.create_test_configurator() - test.code = features_str - code = test.run()['result'] - - t = Utils.to_hashtable(code) - try: - is_big = int(t['bigendian']) - except KeyError: - raise Configure.ConfigurationError('endian test failed '+code) - - if is_big: strbig = 'big endian' - else: strbig = 'little endian' - self.check_message_custom('endianness', '', strbig) - - self.check_message_custom('int size', '', t['int_size']) - self.check_message_custom('long int size', '', t['long_int_size']) - self.check_message_custom('long long int size', '', t['long_long_int_size']) - self.check_message_custom('double size', '', t['double_size']) - - 
self.define_cond('IS_BIGENDIAN', is_big) - self.define_cond('INT_SIZE', int(t['int_size'])) - self.define_cond('LONG_INT_SIZE', int(t['long_int_size'])) - self.define_cond('LONG_LONG_INT_SIZE', int(t['long_long_int_size'])) - self.define_cond('DOUBLE_SIZE', int(t['double_size'])) - - return is_big - -def detect_platform(self): - """adapted from scons""" - import os, sys - if os.name == 'posix': - if sys.platform == 'cygwin': - return 'cygwin' - if str.find(sys.platform, 'linux') != -1: - return 'linux' - if str.find(sys.platform, 'irix') != -1: - return 'irix' - if str.find(sys.platform, 'sunos') != -1: - return 'sunos' - if str.find(sys.platform, 'hp-ux') != -1: - return 'hpux' - if str.find(sys.platform, 'aix') != -1: - return 'aix' - if str.find(sys.platform, 'darwin') != -1: - return 'darwin' - return 'posix' - elif os.name == 'os2': - return 'os2' - elif os.name == 'java': - return 'java' - else: - return sys.platform - -def find_header(self, header, define='', paths=''): - if not define: - define = self.have_define(header) - test = self.create_header_enumerator() - test.mandatory = 1 - test.name = header - test.path = paths - test.define = define - return test.run() - -def check_header(self, header, define='', mandatory=0): - if not define: - define = self.have_define(header) - - test = self.create_header_configurator() - test.name = header - test.define = define - test.mandatory = mandatory - return test.run() - -def try_build_and_exec(self, code, uselib=''): - test = self.create_test_configurator() - test.uselib = uselib - test.code = code - ret = test.run() - if ret: return ret['result'] - return None - -def try_build(self, code, uselib='', msg='', force_compiler = ''): - test = self.create_compile_configurator() - test.uselib = uselib - test.code = code - if force_compiler: - test.force_compiler = force_compiler - if msg: - test.want_message = 1 - test.msg = msg - ret = test.run() - return ret - -def check_flags(self, flags, uselib='', options='', kind='cc', msg=1): - test = self.create_test_configurator() - test.uselib = uselib - test.code = 'int main() {return 0;}\n' - test.force_compiler = kind - test.flags = flags - ret = test.run() - - if msg: self.check_message('flags', flags, not (ret is False)) - - if ret: return 1 - return None - -# function wrappers for convenience -def check_header2(self, name, mandatory=1, define=''): - import os - ck_hdr = self.create_header_configurator() - if define: ck_hdr.define = define - # header provides no fallback for define: - else: ck_hdr.define = self.have_define(os.path.basename(name)) - ck_hdr.mandatory = mandatory - ck_hdr.name = name - return ck_hdr.run() - -def check_library2(self, name, mandatory=1, uselib=''): - ck_lib = self.create_library_configurator() - if uselib: ck_lib.uselib = uselib - ck_lib.mandatory = mandatory - ck_lib.name = name - return ck_lib.run() - -def check_pkg2(self, name, version, mandatory=1, uselib=''): - ck_pkg = self.create_pkgconfig_configurator() - if uselib: ck_pkg.uselib = uselib - ck_pkg.mandatory = mandatory - ck_pkg.version = version - ck_pkg.name = name - return ck_pkg.run() - -def check_cfg2(self, name, mandatory=1, define='', uselib=''): - ck_cfg = self.create_cfgtool_configurator() - if uselib: ck_cfg.uselib = uselib - # cfgtool provides no fallback for uselib: - else: ck_cfg.uselib = name.upper() - ck_cfg.mandatory = mandatory - ck_cfg.binary = name + '-config' - return ck_cfg.run() - - -conf(create_compile_configurator) -conf(checkEndian) -conf(checkFeatures) -conf(detect_platform) 
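These conf(...) registrations (continuing below) are the step promised by the checks.py docstring: each free function is attached to the Configure class so build scripts can invoke it as a method. A minimal sketch of that mechanism, under illustrative names rather than the waf source (waf's actual class is Configure.Configure):

class Configure(object):
    pass

def conf(func):
    # bind the free function as a method on Configure under its own name
    setattr(Configure, func.__name__, func)
    return func

def check_header(self, header):
    # hypothetical check; the real one compiles a test program
    print('checking for %s' % header)
    return True

conf(check_header)

c = Configure()
c.check_header('stdio.h')   # resolved like a normal method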
-conf(find_header) -conf(check_header) -conf(try_build_and_exec) -conf(try_build) -conf(check_flags) -conf(check_header2) -conf(check_library2) -conf(check_pkg2) -conf(check_cfg2) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/compiler_cc.py a2jmidid-9/wafadmin/Tools/compiler_cc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/compiler_cc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/compiler_cc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,57 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat) - -import os, sys, imp, types -import optparse -import Utils, Params, checks, Configure - -c_compiler = { - 'win32': ['msvc', 'gcc'], - 'cygwin': ['gcc'], - 'darwin': ['gcc'], - 'aix5': ['gcc'], - 'linux': ['gcc', 'suncc'], - 'sunos': ['suncc', 'gcc'], - 'irix': ['gcc'], - 'hpux': ['gcc'], - 'default': ['gcc'] -} - -def __list_possible_compiler(platform): - try: - return c_compiler[platform] - except KeyError: - return c_compiler["default"] - -def detect(conf): - try: test_for_compiler = Params.g_options.check_c_compiler - except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cc')") - for c_compiler in test_for_compiler.split(): - conf.check_tool(c_compiler) - if conf.env['CC']: - conf.check_message("%s" %c_compiler, '', True) - conf.env["COMPILER_CC"] = "%s" % c_compiler #store the selected c compiler - return - conf.check_message("%s" %c_compiler, '', False) - conf.env["COMPILER_CC"] = None - -def set_options(opt): - detected_platform = checks.detect_platform(None) - possible_compiler_list = __list_possible_compiler(detected_platform) - test_for_compiler = str(" ").join(possible_compiler_list) - cc_compiler_opts = opt.add_option_group("C Compiler Options") - try: - cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler, - help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % - (detected_platform, test_for_compiler), - dest="check_c_compiler") - except optparse.OptionConflictError: - pass - - for c_compiler in test_for_compiler.split(): - opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts) - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/compiler_cxx.py a2jmidid-9/wafadmin/Tools/compiler_cxx.py --- a2jmidid-8~dfsg0/wafadmin/Tools/compiler_cxx.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/compiler_cxx.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,57 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat) - -import os, sys, imp, types -import optparse -import Utils, Params, checks, Configure - -c_compiler = { -'win32': ['msvc', 'g++'], -'cygwin': ['g++'], -'darwin': ['g++'], -'aix5': ['g++'], -'linux': ['g++', 'sunc++'], -'sunos': ['sunc++', 'g++'], -'irix': ['g++'], -'hpux': ['g++'], -'default': ['g++'] -} - -def __list_possible_compiler(platform): - try: - return(c_compiler[platform]) - except KeyError: - return(c_compiler["default"]) - -def detect(conf): - try: test_for_compiler = Params.g_options.check_cxx_compiler - except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')") - for cxx_compiler in test_for_compiler.split(): - conf.check_tool(cxx_compiler) - if conf.env['CXX']: - conf.check_message("%s" %cxx_compiler, '', True) - conf.env["COMPILER_CXX"] = "%s" %cxx_compiler #store the selected c++ compiler - return - conf.check_message("%s" %cxx_compiler, '', False) - conf.env["COMPILER_CXX"] = None - -def set_options(opt): - detected_platform = checks.detect_platform(None) - possible_compiler_list = __list_possible_compiler(detected_platform) - test_for_compiler = str(" ").join(possible_compiler_list) - cxx_compiler_opts = opt.add_option_group("C++ Compiler Options") - try: - cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler, - help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % - (detected_platform, test_for_compiler), - dest="check_cxx_compiler") - except optparse.OptionConflictError: - pass - for cxx_compiler in test_for_compiler.split(): - opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/compiler_d.py a2jmidid-9/wafadmin/Tools/compiler_d.py --- a2jmidid-8~dfsg0/wafadmin/Tools/compiler_d.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/compiler_d.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,35 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2007 (dv) - -import os, sys, imp, types -import Utils, Params, checks, Configure - -def detect(conf): - if getattr(Params.g_options, 'check_dmd_first', None): - test_for_compiler = ['dmd', 'gdc'] - else: - test_for_compiler = ['gdc', 'dmd'] - - for d_compiler in test_for_compiler: - conf.check_tool(d_compiler) - if conf.env['D_COMPILER']: - conf.check_message("%s" % d_compiler, '', True) - conf.env["COMPILER_D"] = d_compiler - return - conf.check_message("%s" % d_compiler, '', False) - -def set_options(opt): - d_compiler_opts = opt.add_option_group("D Compiler Options") - try: - d_compiler_opts.add_option('--check-dmd-first', action = "store_true", help = 'checks for the gdc compiler before dmd (default is the other way round)', dest = 'check_dmd_first',default = False) - except Exception: - pass - - for d_compiler in ['gdc', 'dmd']: - opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/config_c.py a2jmidid-9/wafadmin/Tools/config_c.py --- a2jmidid-8~dfsg0/wafadmin/Tools/config_c.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/config_c.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1175 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2008 (ita) - -""" -c/c++ configuration routines - -classes such as program_enumerator are attached to the Configure class, -avoiding lots of imports in user scripts - -Usage example (see demos/adv/wscript): -program_enumerator -> conf.create_program_enumerator - -The functions preceded by "@conf" are attached in the same manner -""" - -import os, types, imp, cPickle, sys, shlex, warnings -from Utils import md5 -import Params, Environment, Runner, Build, Utils, Configure, TaskGen, Task -from Params import fatal, warning -from Constants import * -from Configure import conf, conftest - -class attached_conf(type): - """no decorators for classes, so we use a metaclass - map 'conf.create_classname()' to 'classname()'""" - def __init__(cls, name, bases, dict): - super(attached_conf, cls).__init__(name, bases, dict) - def fun_create(self): - inst = cls(self) - return inst - setattr(Configure.Configure, 'create_' + cls.__name__, fun_create) - -class enumerator_base(object): - def __init__(self, conf): - self.conf = conf - self.env = conf.env - self.define = '' - self.mandatory = 0 - self.message = '' - - def error(self): - if self.message: - fatal(self.message) - else: - fatal('A mandatory check failed. Make sure all dependencies are ok and can be found.') - - def update_hash(self, md5hash): - classvars = vars(self) - for (var, value) in classvars.iteritems(): - # TODO comparing value to env is fast or slow ? - if callable(var): continue - if value == self: continue - if value == self.env: continue - if value == self.conf: continue - md5hash.update(str(value)) - - def update_env(self, hashtable): - # skip this if hashtable is only a string - if not type(hashtable) is types.StringType: - for name in hashtable.keys(): - self.env.append_value(name, hashtable[name]) - - def validate(self): - pass - - def hash(self): - m = md5() - self.update_hash(m) - return m.digest() - - def run_cache(self, retvalue): - # interface, do not remove - pass - - def run(self): - self.validate() - if Params.g_cache_global and not Params.g_options.nocache: - newhash = self.hash() - try: - ret = self.conf.m_cache_table[newhash] - except KeyError: - pass # go to A1 just below - else: - self.run_cache(ret) - if self.mandatory and not ret: self.error() - return ret - - # A1 - no cache or new test - ret = self.run_test() - if self.mandatory and not ret: self.error() - - if Params.g_cache_global: - newhash = self.hash() - self.conf.m_cache_table[newhash] = ret - return ret - - # Override this method, not run()! 
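A note on the class above: enumerator_base.run() memoizes configuration checks. When the global cache is enabled it hashes the check's attributes with md5, looks the digest up in m_cache_table, and only falls back to the real test on a miss, storing the result afterwards; run_test below is the hook subclasses override. A stripped-down sketch of that memoization pattern (hypothetical names; the real code also skips self/env/conf when hashing, persists the table, and enforces 'mandatory'):

from hashlib import md5

class CachedCheck(object):
    cache = {}   # the real code persists this between configure runs

    def __init__(self, **attrs):
        self.__dict__.update(attrs)

    def hash(self):
        # hash every attribute; waf additionally skips env/conf/self references
        m = md5()
        for key in sorted(self.__dict__):
            m.update(str(self.__dict__[key]).encode('utf-8'))
        return m.digest()

    def run_test(self):
        raise NotImplementedError   # override this method, not run()

    def run(self):
        key = self.hash()
        if key in self.cache:        # cache hit: skip the expensive test
            return self.cache[key]
        ret = self.cache[key] = self.run_test()
        return ret

class TrueCheck(CachedCheck):
    def run_test(self):
        return True

assert TrueCheck(name='x').run() is True   # a second run() would hit the cache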
- def run_test(self): - return not Configure.TEST_OK - -class configurator_base(enumerator_base): - def __init__(self, conf): - enumerator_base.__init__(self, conf) - self.uselib = '' - -class program_enumerator(enumerator_base): - __metaclass__ = attached_conf - def __init__(self,conf): - enumerator_base.__init__(self, conf) - - self.name = '' - self.path = [] - self.var = None - - def error(self): - errmsg = 'program %s cannot be found' % self.name - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def run_cache(self, retval): - self.conf.check_message('program %s (cached)' % self.name, '', retval, option=retval) - if self.var: self.env[self.var] = retval - - def run_test(self): - ret = Configure.find_program_impl(self.env, self.name, self.path, self.var) - self.conf.check_message('program', self.name, ret, ret) - if self.var: self.env[self.var] = ret - return ret - -class function_enumerator(enumerator_base): - __metaclass__ = attached_conf - def __init__(self,conf): - enumerator_base.__init__(self, conf) - - self.function = '' - self.define = '' - - self.headers = [] - self.header_code = '' - self.custom_code = '' - - self.include_paths = [] - self.libs = [] - self.lib_paths = [] - - def error(self): - errmsg = 'function %s cannot be found' % self.function - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def validate(self): - if not self.define: - self.define = self.function.upper() - - def run_cache(self, retval): - self.conf.check_message('function %s (cached)' % self.function, '', retval, option='') - if retval: - self.conf.define(self.define, retval) - else: - self.conf.undefine(self.define) - - def run_test(self): - ret = not Configure.TEST_OK - - oldlibpath = self.env['LIBPATH'] - oldlib = self.env['LIB'] - - code = [] - code.append(self.header_code) - code.append('\n') - for header in self.headers: - code.append('#include <%s>\n' % header) - - if self.custom_code: - code.append('int main(){%s\nreturn 0;}\n' % self.custom_code) - else: - code.append('int main(){\nvoid *p;\np=(void*)(%s);\nreturn 0;\n}\n' % self.function) - - self.env['LIB'] = Utils.to_list(self.libs) - self.env['LIBPATH'] = Utils.to_list(self.lib_paths) - - obj = check_data() - obj.code = "\n".join(code) - obj.includes = self.include_paths - obj.env = self.env - - ret = int(self.conf.run_check(obj)) - self.conf.check_message('function %s' % self.function, '', ret, option='') - - if ret: - self.conf.define(self.define, ret) - else: - self.conf.undefine(self.define) - - self.env['LIB'] = oldlib - self.env['LIBPATH'] = oldlibpath - - return ret - -class library_enumerator(enumerator_base): - "find a library in a list of paths" - __metaclass__ = attached_conf - def __init__(self, conf): - enumerator_base.__init__(self, conf) - - self.name = '' - self.path = [] - self.code = 'int main() {return 0;}\n' - self.uselib = '' # to set the LIB_NAME and LIBPATH_NAME - self.nosystem = 0 # do not use standard lib paths - self.want_message = 1 - - def error(self): - errmsg = 'library %s cannot be found' % self.name - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def run_cache(self, retval): - if self.want_message: - self.conf.check_message('library %s (cached)' % self.name, '', retval, option=retval) - self.update_env(retval) - - def validate(self): - if not self.nosystem and not self.path: - self.path += Configure.g_stdlibpath - - def run_test(self): - ret = '' # returns a string - - patterns = [self.env['shlib_PATTERN'], 'lib%s.dll.a', 'lib%s.lib', 
self.env['staticlib_PATTERN']] - for x in patterns: - name = x % self.name - ret = Configure.find_file(name, self.path) - if ret: break - - if self.want_message: - self.conf.check_message('library '+self.name, '', ret, option=ret) - - if self.uselib: - self.env['LIB_'+self.uselib] += [ self.name ] - self.env['LIBPATH_'+self.uselib] += [ ret ] - - return ret - -class header_enumerator(enumerator_base): - "find a header in a list of paths" - __metaclass__ = attached_conf - def __init__(self,conf): - enumerator_base.__init__(self, conf) - - self.name = [] - self.path = [] - self.define = [] - self.nosystem = 0 - self.want_message = 1 - - def validate(self): - if not self.nosystem and not self.path: - self.path = Configure.g_stdincpath - - def error(self): - errmsg = 'cannot find %s in %s' % (self.name, str(self.path)) - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def run_cache(self, retval): - if self.want_message: - self.conf.check_message('header %s (cached)' % self.name, '', retval, option=retval) - if self.define: self.env[self.define] = retval - - def run_test(self): - ret = Configure.find_file(self.name, self.path) - if self.want_message: - self.conf.check_message('header', self.name, ret, ret) - if self.define: self.env[self.define] = ret - return ret - -## ENUMERATORS END -################### - -################### -## CONFIGURATORS - -class cfgtool_configurator(configurator_base): - __metaclass__ = attached_conf - def __init__(self,conf): - configurator_base.__init__(self, conf) - - self.uselib = '' - self.define = '' - self.binary = '' - - self.tests = {} - - def error(self): - errmsg = '%s cannot be found' % self.binary - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def validate(self): - if not self.binary: - raise ValueError, "no binary given in cfgtool!" - if not self.uselib: - raise ValueError, "no uselib given in cfgtool!" 
- if not self.define and self.uselib: - self.define = self.conf.have_define(self.uselib) - - if not self.tests: - self.tests['--cflags'] = 'CCFLAGS' - self.tests['--cflags'] = 'CXXFLAGS' - self.tests['--libs'] = 'LINKFLAGS' - - def run_cache(self, retval): - if retval: - self.update_env(retval) - self.conf.define(self.define, 1) - else: - self.conf.undefine(self.define) - self.conf.check_message('config-tool %s (cached)' % self.binary, '', retval, option='') - - def run_test(self): - retval = {} - found = Configure.TEST_OK - - null='2>/dev/null' - if sys.platform == "win32": null='2>nul' - try: - ret = os.popen('%s %s %s' % (self.binary, self.tests.keys()[0], null)).close() - if ret: raise ValueError, "error" - - for flag in self.tests: - var = self.tests[flag] + '_' + self.uselib - cmd = '%s %s %s' % (self.binary, flag, null) - retval[var] = [os.popen(cmd).read().strip()] - - self.update_env(retval) - except ValueError: - retval = {} - found = not Configure.TEST_OK - - if found: - self.conf.define(self.define, found) - else: - self.conf.undefine(self.define) - self.conf.check_message('config-tool ' + self.binary, '', found, option = '') - return retval - -class pkgconfig_configurator(configurator_base): - """ pkgconfig_configurator is a frontend to pkg-config variables: - - name: name of the .pc file (has to be set at least) - - version: atleast-version to check for - - path: override the pkgconfig path (PKG_CONFIG_PATH) - - uselib: name that could be used in tasks with obj.uselib if not set uselib = upper(name) - - define: name that will be used in config.h if not set define = HAVE_+uselib - - variables: list of addional variables to be checked for, for example variables='prefix libdir' - - static - """ - __metaclass__ = attached_conf - def __init__(self, conf): - configurator_base.__init__(self,conf) - - self.name = '' # name of the .pc file - self.version = '' # version to check - self.pkgpath = os.path.join(Params.g_options.prefix, 'lib', 'pkgconfig') # pkg config path - self.uselib = '' # can be set automatically - self.define = '' # can be set automatically - self.binary = '' # name and path for pkg-config - self.static = False - - # You could also check for extra values in a pkg-config file. - # Use this value to define which values should be checked - # and defined. 
Several formats for this value are supported: - # - string with spaces to separate a list - # - list of values to check (define name will be upper(uselib"_"value_name)) - # - a list of [value_name, override define_name] - self.variables = [] - self.defines = {} - - def error(self): - if self.version: - errmsg = 'pkg-config cannot find %s >= %s' % (self.name, self.version) - else: - errmsg = 'pkg-config cannot find %s' % self.name - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def validate(self): - if not self.uselib: - self.uselib = self.name.upper() - if not self.define: - self.define = self.conf.have_define(self.uselib) - - def run_cache(self, retval): - if self.version: - self.conf.check_message('package %s >= %s (cached)' % (self.name, self.version), '', retval, option='') - else: - self.conf.check_message('package %s (cached)' % self.name, '', retval, option='') - if retval: - self.conf.define(self.define, 1) - else: - self.conf.undefine(self.define) - self.update_env(retval) - - def _setup_pkg_config_path(self): - pkgpath = self.pkgpath - if not pkgpath: - return "" - - if sys.platform == 'win32': - if hasattr(self, 'pkgpath_win32_setup'): - return "" - pkgpath_env=os.getenv('PKG_CONFIG_PATH') - - if pkgpath_env: - pkgpath_env = pkgpath_env + ';' +pkgpath - else: - pkgpath_env = pkgpath - - os.putenv('PKG_CONFIG_PATH',pkgpath_env) - setattr(self,'pkgpath_win32_setup',True) - return "" - - pkgpath = 'PKG_CONFIG_PATH=$PKG_CONFIG_PATH:' + pkgpath - return pkgpath - - def run_test(self): - pkgbin = self.binary - uselib = self.uselib - - # check if self.variables is a string with spaces - # to separate the variables to check for - # if yes convert variables to a list - if type(self.variables) is types.StringType: - self.variables = str(self.variables).split() - - if not pkgbin: - pkgbin = 'pkg-config' - pkgpath = self._setup_pkg_config_path() - pkgcom = '%s %s' % (pkgpath, pkgbin) - - for key, val in self.defines.items(): - pkgcom += ' --define-variable=%s=%s' % (key, val) - - if self.static: - pkgcom += ' --static' - - g_defines = self.env['PKG_CONFIG_DEFINES'] - if type(g_defines) is types.DictType: - for key, val in g_defines.items(): - if self.defines and self.defines.has_key(key): - continue - pkgcom += ' --define-variable=%s=%s' % (key, val) - - retval = {} - - try: - if self.version: - cmd = "%s --atleast-version=%s \"%s\"" % (pkgcom, self.version, self.name) - ret = os.popen(cmd).close() - Params.debug("pkg-config cmd '%s' returned %s" % (cmd, ret)) - self.conf.check_message('package %s >= %s' % (self.name, self.version), '', not ret) - if ret: raise ValueError, "error" - else: - cmd = "%s \"%s\"" % (pkgcom, self.name) - ret = os.popen(cmd).close() - Params.debug("pkg-config cmd '%s' returned %s" % (cmd, ret)) - self.conf.check_message('package %s' % (self.name), '', not ret) - if ret: - raise ValueError, "error" - - cflags_I = shlex.split(os.popen('%s --cflags-only-I \"%s\"' % (pkgcom, self.name)).read()) - cflags_other = shlex.split(os.popen('%s --cflags-only-other \"%s\"' % (pkgcom, self.name)).read()) - retval['CCFLAGS_'+uselib] = cflags_other - retval['CXXFLAGS_'+uselib] = cflags_other - retval['CPPPATH_'+uselib] = [] - for incpath in cflags_I: - assert incpath[:2] == '-I' or incpath[:2] == '/I' - retval['CPPPATH_'+uselib].append(incpath[2:]) # strip '-I' or '/I' - - static_l = '' - if self.static: - static_l = 'STATIC' - - #env['LINKFLAGS_'+uselib] = os.popen('%s --libs %s' % (pkgcom, self.name)).read().strip() - # Store the library names: - modlibs = 
os.popen('%s --libs-only-l \"%s\"' % (pkgcom, self.name)).read().strip().split() - retval[static_l+'LIB_'+uselib] = [] - for item in modlibs: - retval[static_l+'LIB_'+uselib].append( item[2:] ) #Strip '-l' - - # Store the library paths: - modpaths = os.popen('%s --libs-only-L \"%s\"' % (pkgcom, self.name)).read().strip().split() - retval['LIBPATH_'+uselib] = [] - for item in modpaths: - retval['LIBPATH_'+uselib].append( item[2:] ) #Strip '-l' - - # Store only other: - modother = os.popen('%s --libs-only-other \"%s\"' % (pkgcom, self.name)).read().strip().split() - retval['LINKFLAGS_'+uselib] = [] - for item in modother: - if str(item).endswith(".la"): - import libtool - la_config = libtool.libtool_config(item) - libs_only_L = la_config.get_libs_only_L() - libs_only_l = la_config.get_libs_only_l() - for entry in libs_only_l: - retval[static_l + 'LIB_'+uselib].append( entry[2:] ) #Strip '-l' - for entry in libs_only_L: - retval['LIBPATH_'+uselib].append( entry[2:] ) #Strip '-L' - else: - retval['LINKFLAGS_'+uselib].append( item ) #do not strip anything - - for variable in self.variables: - var_defname = '' - # check if variable is a list - if (type(variable) is types.ListType): - # is it a list of [value_name, override define_name] ? - if len(variable) == 2 and variable[1]: - # if so use the overrided define_name as var_defname - var_defname = variable[1] - # convert variable to a string that name the variable to check for. - variable = variable[0] - - # if var_defname was not overrided by the list containing the define_name - if not var_defname: - var_defname = uselib + '_' + variable.upper() - - retval[var_defname] = os.popen('%s --variable=%s \"%s\"' % (pkgcom, variable, self.name)).read().strip() - - self.conf.define(self.define, 1) - self.update_env(retval) - except ValueError: - retval = {} - self.conf.undefine(self.define) - - return retval - -class test_configurator(configurator_base): - __metaclass__ = attached_conf - def __init__(self, conf): - configurator_base.__init__(self, conf) - self.name = '' - self.code = '' - self.flags = '' - self.define = '' - self.uselib = '' - self.want_message = 0 - - def error(self): - errmsg = 'test program would not run' - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def run_cache(self, retval): - if self.want_message: - self.conf.check_message('custom code (cached)', '', 1, option=retval['result']) - - def validate(self): - if not self.code: - fatal('test configurator needs code to compile and run!') - - def run_test(self): - obj = check_data() - obj.code = self.code - obj.env = self.env - obj.uselib = self.uselib - obj.flags = self.flags - obj.force_compiler = getattr(self, 'force_compiler', None) - obj.execute = 1 - ret = self.conf.run_check(obj) - - if self.want_message: - if ret: data = ret['result'] - else: data = '' - self.conf.check_message('custom code', '', ret, option=data) - - return ret - -class library_configurator(configurator_base): - __metaclass__ = attached_conf - def __init__(self,conf): - configurator_base.__init__(self,conf) - - self.name = '' - self.path = [] - self.define = '' - self.nosystem = 0 - self.uselib = '' - self.static = False - self.libs = [] - self.lib_paths = [] - - self.code = 'int main(){return 0;}\n' - - def error(self): - errmsg = 'library %s cannot be linked' % self.name - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def run_cache(self, retval): - self.conf.check_message('library %s (cached)' % self.name, '', retval) - if retval: - self.update_env(retval) - 
self.conf.define(self.define, 1) - else: - self.conf.undefine(self.define) - - def validate(self): - if not self.uselib: - self.uselib = self.name.upper() - if not self.define: - self.define = self.conf.have_define(self.uselib) - - if not self.uselib: - fatal('uselib is not defined') - if not self.code: - fatal('library enumerator must have code to compile') - - def run_test(self): - oldlibpath = self.env['LIBPATH'] - oldlib = self.env['LIB'] - - static_l = '' - if self.static: - static_l = 'STATIC' - - olduselibpath = self.env['LIBPATH_'+self.uselib] - olduselib = self.env[static_l+'LIB_'+self.uselib] - - # try the enumerator to find the correct libpath - test = self.conf.create_library_enumerator() - test.nosystem = self.nosystem - test.name = self.name - test.want_message = 0 - test.path = self.path - test.env = self.env - ret = test.run() - - if ret: - self.env['LIBPATH_'+self.uselib] += [ ret ] - - self.env[static_l+'LIB_'+self.uselib] += [ self.name ] - - self.env['LIB'] = [self.name] + self.libs - self.env['LIBPATH'] = self.lib_paths - - obj = check_data() - obj.code = self.code - obj.env = self.env - obj.uselib = self.uselib - obj.libpath = self.path - - ret = int(self.conf.run_check(obj)) - self.conf.check_message('library %s' % self.name, '', ret) - - if ret: - self.conf.define(self.define, ret) - else: - self.conf.undefine(self.define) - - val = {} - if ret: - val['LIBPATH_'+self.uselib] = self.env['LIBPATH_'+self.uselib] - val[static_l+'LIB_'+self.uselib] = self.env['LIB_'+self.uselib] - val[self.define] = ret - else: - self.env['LIBPATH_'+self.uselib] = olduselibpath - self.env[static_l+'LIB_'+self.uselib] = olduselib - - self.env['LIB'] = oldlib - self.env['LIBPATH'] = oldlibpath - - return val - -class framework_configurator(configurator_base): - __metaclass__ = attached_conf - def __init__(self,conf): - configurator_base.__init__(self,conf) - - self.name = '' - self.custom_code = '' - self.code = 'int main(){return 0;}\n' - - self.define = '' # HAVE_something - - self.path = [] - self.uselib = '' - self.remove_dot_h = False - - def error(self): - errmsg = 'framework %s cannot be found via compiler, try pass -F' % self.name - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def validate(self): - if not self.uselib: - self.uselib = self.name.upper() - if not self.define: - self.define = self.conf.have_define(self.uselib) - if not self.code: - self.code = "#include <%s>\nint main(){return 0;}\n" - if not self.uselib: - self.uselib = self.name.upper() - - def run_cache(self, retval): - self.conf.check_message('framework %s (cached)' % self.name, '', retval) - self.update_env(retval) - if retval: - self.conf.define(self.define, 1) - else: - self.conf.undefine(self.define) - - def run_test(self): - oldlkflags = [] - oldccflags = [] - oldcxxflags = [] - - oldlkflags += self.env['LINKFLAGS'] - oldccflags += self.env['CCFLAGS'] - oldcxxflags += self.env['CXXFLAGS'] - - code = [] - if self.remove_dot_h: - code.append('#include <%s/%s>\n' % (self.name, self.name)) - else: - code.append('#include <%s/%s.h>\n' % (self.name, self.name)) - - code.append('int main(){%s\nreturn 0;}\n' % self.custom_code) - - linkflags = [] - linkflags += ['-framework', self.name] - linkflags += ['-F%s' % p for p in self.path] - cflags = ['-F%s' % p for p in self.path] - - myenv = self.env.copy() - myenv['LINKFLAGS'] += linkflags - - obj = check_data() - obj.code = "\n".join(code) - obj.env = myenv - obj.uselib = self.uselib - - obj.flags += " ".join (cflags) - - ret = 
int(self.conf.run_check(obj)) - self.conf.check_message('framework %s' % self.name, '', ret, option='') - if ret: - self.conf.define(self.define, ret) - else: - self.conf.undefine(self.define) - - val = {} - if ret: - val["LINKFLAGS_" + self.uselib] = linkflags - val["CCFLAGS_" + self.uselib] = cflags - val["CXXFLAGS_" + self.uselib] = cflags - val[self.define] = ret - - self.env['LINKFLAGS'] = oldlkflags - self.env['CCFLAGS'] = oldccflags - self.env['CXXFLAGS'] = oldcxxflags - - self.update_env(val) - - return val - -class header_configurator(configurator_base): - __metaclass__ = attached_conf - def __init__(self, conf): - configurator_base.__init__(self,conf) - - self.name = '' - self.path = [] - self.header_code = '' - self.custom_code = '' - self.code = 'int main() {return 0;}\n' - - self.define = '' # HAVE_something - self.nosystem = 0 - - self.libs = [] - self.lib_paths = [] - self.uselib = '' - - def error(self): - errmsg = 'header %s cannot be found via compiler' % self.name - if self.message: errmsg += '\n%s' % self.message - fatal(errmsg) - - def validate(self): - # self.names = self.names.split() - if not self.define: - if self.name: self.define = self.conf.have_define(self.name) - elif self.uselib: self.define = self.conf.have_define(self.uselib) - - if not self.code: - self.code = "#include <%s>\nint main(){return 0;}\n" - if not self.define: - fatal('no define given') - - def run_cache(self, retvalue): - self.conf.check_message('header %s (cached)' % self.name, '', retvalue) - if retvalue: - self.update_env(retvalue) - self.conf.define(self.define, 1) - else: - self.conf.undefine(self.define) - - def run_test(self): - ret = {} # not found - - oldlibpath = self.env['LIBPATH'] - oldlib = self.env['LIB'] - - # try the enumerator to find the correct includepath - if self.uselib: - test = self.conf.create_header_enumerator() - test.nosystem = self.nosystem - test.name = self.name - test.want_message = 0 - test.path = self.path - test.env = self.env - ret = test.run() - - if ret: - self.env['CPPPATH_'+self.uselib] = ret - - code = [] - code.append(self.header_code) - code.append('\n') - code.append('#include <%s>\n' % self.name) - - code.append('int main(){%s\nreturn 0;}\n' % self.custom_code) - - self.env['LIB'] = Utils.to_list(self.libs) - self.env['LIBPATH'] = Utils.to_list(self.lib_paths) - - obj = check_data() - obj.code = "\n".join(code) - obj.includes = self.path - obj.env = self.env - obj.uselib = self.uselib - - ret = int(self.conf.run_check(obj)) - self.conf.check_message('header %s' % self.name, '', ret, option='') - - if ret: - self.conf.define(self.define, ret) - else: - self.conf.undefine(self.define) - - self.env['LIB'] = oldlib - self.env['LIBPATH'] = oldlibpath - - val = {} - if ret: - val['CPPPATH_'+self.uselib] = self.env['CPPPATH_'+self.uselib] - val[self.define] = ret - - if not ret: return {} - return val - -class common_include_configurator(header_enumerator): - """Looks for a given header. If found, it will be written later by write_config_header() - - Forced include files are headers that are being used by all source files. - One can include files this way using gcc '-include file.h' or msvc '/fi file.h'. - The alternative suggested here (common includes) is: - Make all files include 'config.h', then add these forced-included headers to - config.h (good for compilers that don't have have this feature and - for further flexibility). - """ - __metaclass__ = attached_conf - def run_test(self): - # if a header was found, header_enumerator returns its directory. 
- header_dir = header_enumerator.run_test(self) - - if header_dir: - # if the header was found, add its path to set of forced_include files - # to be using later in write_config_header() - header_path = os.path.join(header_dir, self.name) - - # if this header was not stored already, add it to the list of common headers. - self.env.append_unique(COMMON_INCLUDES, header_path) - - # the return value of all enumerators is checked by enumerator_base.run() - return header_dir - -# CONFIGURATORS END -#################### - -class check_data(object): - def __init__(self): - - self.env = '' # environment to use - - self.code = '' # the code to execute - - self.flags = '' # the flags to give to the compiler - - self.uselib = '' # uselib - self.includes = '' # include paths - - self.function_name = '' # function to check for - - self.lib = [] - self.libpath = [] # libpath for linking - - self.define = '' # define to add if run is successful - - self.header_name = '' # header name to check for - - self.execute = 0 # execute the program produced and return its output - self.options = '' # command-line options - - self.force_compiler = None - self.build_type = 'program' -setattr(Configure, 'check_data', check_data) # warning, attached to the module - -def define(self, define, value, quote=1): - """store a single define and its state into an internal list for later - writing to a config header file. Value can only be - a string or int; other types not supported. String - values will appear properly quoted in the generated - header file.""" - assert define and isinstance(define, str) - - tbl = self.env[DEFINES] or Utils.ordered_dict() - - # the user forgot to tell if the value is quoted or not - if isinstance(value, str): - if quote == 1: - tbl[define] = '"%s"' % str(value) - else: - tbl[define] = value - elif isinstance(value, int): - tbl[define] = value - else: - raise TypeError - - # add later to make reconfiguring faster - self.env[DEFINES] = tbl - self.env[define] = value # <- not certain this is necessary - -def undefine(self, define): - """store a single define and its state into an internal list - for later writing to a config header file""" - assert define and isinstance(define, str) - - tbl = self.env[DEFINES] or Utils.ordered_dict() - - value = UNDEFINED - tbl[define] = value - - # add later to make reconfiguring faster - self.env[DEFINES] = tbl - self.env[define] = value - -def define_cond(self, name, value): - """Conditionally define a name. - Formally equivalent to: if value: define(name, 1) else: undefine(name)""" - if value: - self.define(name, 1) - else: - self.undefine(name) - -def is_defined(self, key): - defines = self.env[DEFINES] - if not defines: - return False - try: - value = defines[key] - except KeyError: - return False - else: - return value != UNDEFINED - -def get_define(self, define): - "get the value of a previously stored define" - try: return self.env[DEFINES][define] - except KeyError: return None - -def have_define(self, name): - "prefix the define with 'HAVE_' and make sure it has valid characters." 
- return "HAVE_%s" % Utils.quote_define_name(name) - -def write_config_header(self, configfile='', env=''): - "save the defines into a file" - if not configfile: configfile = self.configheader - - lst = Utils.split_path(configfile) - base = lst[:-1] - - if not env: env = self.env - base = [self.m_blddir, env.variant()]+base - dir = os.path.join(*base) - if not os.path.exists(dir): - os.makedirs(dir) - - dir = os.path.join(dir, lst[-1]) - - # remember config files - do not remove them on "waf clean" - self.env.append_value('waf_config_files', os.path.abspath(dir)) - - inclusion_guard_name = '_%s_WAF' % Utils.quote_define_name(configfile) - - dest = open(dir, 'w') - dest.write('/* Configuration header created by Waf - do not edit */\n') - dest.write('#ifndef %s\n#define %s\n\n' % (inclusion_guard_name, inclusion_guard_name)) - - # yes, this is special - if not configfile in self.env['dep_files']: - self.env['dep_files'] += [configfile] - - tbl = env[DEFINES] or Utils.ordered_dict() - for key in tbl.allkeys: - value = tbl[key] - if value is None: - dest.write('#define %s\n' % key) - elif value is UNDEFINED: - dest.write('/* #undef %s */\n' % key) - else: - dest.write('#define %s %s\n' % (key, value)) - - # Adds common-includes to config header. Should come after defines, - # so they will be defined for the common include files too. - for include_file in self.env[COMMON_INCLUDES]: - dest.write('\n#include "%s"' % include_file) - - dest.write('\n#endif /* %s */\n' % (inclusion_guard_name,)) - dest.close() - -def set_config_header(self, header): - "set a config header file" - self.configheader = header - -def run_check(self, obj): - """compile, link and run if necessary - @param obj: data of type check_data - @return: (False if a error during build happens) or ( (True if build ok) or (a {'result': ''} if execute was set)) - """ - # first make sure the code to execute is defined - if not obj.code: - raise Configure.ConfigurationError('run_check: no code to process in check') - - # create a small folder for testing - dir = os.path.join(self.m_blddir, '.wscript-trybuild') - - # if the folder already exists, remove it - for (root, dirs, filenames) in os.walk(dir): - for f in list(filenames): - os.remove(os.path.join(root, f)) - - bdir = os.path.join(dir, '_testbuild_') - - if (not obj.force_compiler and Task.g_task_types.get('cxx', None)) or obj.force_compiler == "cpp": - tp = 'cpp' - test_f_name = 'test.cpp' - else: - tp = 'cc' - test_f_name = 'test.c' - - # FIXME: by default the following lines are called more than once - # we have to make sure they get called only once - if not os.path.exists(dir): - os.makedirs(dir) - - if not os.path.exists(bdir): - os.makedirs(bdir) - - if obj.env: env = obj.env - else: env = self.env.copy() - - dest=open(os.path.join(dir, test_f_name), 'w') - dest.write(obj.code) - dest.close() - - # very important - Utils.reset() - - back=os.path.abspath('.') - - bld = Build.Build() - bld.m_allenvs.update(self.m_allenvs) - bld.m_allenvs['default'] = env - bld._variants=bld.m_allenvs.keys() - bld.load_dirs(dir, bdir, isconfigure=1) - - os.chdir(dir) - - bld.rescan(bld.m_srcnode) - - #o = TaskGen.task_gen.classes[tp](obj.build_type) - o = bld.create_obj(tp, obj.build_type) - o.source = test_f_name - o.target = 'testprog' - o.uselib = obj.uselib - o.cppflags = obj.flags - o.includes = obj.includes - - # compile the program - try: - ret = bld.compile() - except Build.BuildError: - ret = 1 - - # keep the name of the program to execute - if obj.execute: - lastprog = 
o.link_task.m_outputs[0].abspath(o.env) - - #if runopts is not None: - # ret = os.popen(obj.link_task.m_outputs[0].abspath(obj.env)).read().strip() - - os.chdir(back) - Utils.reset() - - # if we need to run the program, try to get its result - if obj.execute: - if ret: return not ret - data = os.popen('"%s"' %lastprog).read().strip() - ret = {'result': data} - return ret - - return not ret - -def cc_check_features(self, kind='cc'): - v = self.env - # check for compiler features: programs, shared and static libraries - test = Configure.check_data() - test.code = 'int main() {return 0;}\n' - test.env = v - test.execute = 1 - test.force_compiler = kind - ret = self.run_check(test) - self.check_message('compiler could create', 'programs', not (ret is False)) - if not ret: self.fatal("no programs") - - lib_obj = Configure.check_data() - lib_obj.code = "int k = 3;\n" - lib_obj.env = v - lib_obj.build_type = "shlib" - lib_obj.force_compiler = kind - ret = self.run_check(lib_obj) - self.check_message('compiler could create', 'shared libs', not (ret is False)) - if not ret: self.fatal("no shared libs") - - lib_obj = Configure.check_data() - lib_obj.code = "int k = 3;\n" - lib_obj.env = v - lib_obj.build_type = "staticlib" - lib_obj.force_compiler = kind - ret = self.run_check(lib_obj) - self.check_message('compiler could create', 'static libs', not (ret is False)) - if not ret: self.fatal("no static libs") - -def cxx_check_features(self): - return cc_check_features(self, kind='cpp') - - -conf(define) -conf(undefine) -conf(define_cond) -conf(is_defined) -conf(get_define) -conf(have_define) -conf(write_config_header) -conf(set_config_header) -conf(run_check) -conftest(cc_check_features) -conftest(cxx_check_features) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/cs.py a2jmidid-9/wafadmin/Tools/cs.py --- a2jmidid-8~dfsg0/wafadmin/Tools/cs.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/cs.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,77 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -"C# support" - -import Params, TaskGen, Utils, Task -from Params import error - -g_types_lst = ['program', 'library'] -class cs_taskgen(TaskGen.task_gen): - def __init__(self, *k): - TaskGen.task_gen.__init__(self, *k) - - self.m_type = k[1] - - self.source = '' - self.target = '' - - self.flags = '' - self.assemblies = '' - self.resources = '' - - self.uselib = '' - - self._flag_vars = ['FLAGS', 'ASSEMBLIES'] - - if not self.m_type in g_types_lst: - error('type for csobj is undefined '+type) - type='program' - - def apply(self): - self.apply_uselib() - - # process the flags for the assemblies - assemblies_flags = [] - for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']: - assemblies_flags += '/r:'+i - self.env['_ASSEMBLIES'] += assemblies_flags - - # process the flags for the resources - for i in self.to_list(self.resources): - self.env['_RESOURCES'].append('/resource:'+i) - - # additional flags - self.env['_FLAGS'] += self.to_list(self.flags) + self.env['FLAGS'] - - curnode = self.path - - # process the sources - nodes = [] - for i in self.to_list(self.source): - nodes.append(curnode.find_resource(i)) - - # create the task - task = self.create_task('mcs', self.env) - task.m_inputs = nodes - task.set_outputs(self.path.find_build(self.target)) - - def apply_uselib(self): - if not self.uselib: - return - for var in self.to_list(self.uselib): - for v in self._flag_vars: - val = self.env[v+'_'+var] - if val: self.env.append_value(v, val) - -Task.simple_task_type('mcs', '${MCS} ${SRC} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW', prio=101) - -def detect(conf): - mcs = conf.find_program('mcs', var='MCS') - if not mcs: mcs = conf.find_program('gmcs', var='MCS') - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/cxx.py a2jmidid-9/wafadmin/Tools/cxx.py --- a2jmidid-8~dfsg0/wafadmin/Tools/cxx.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/cxx.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,120 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005 (ita) - -"Base for c++ programs and libraries" - -import sys -import TaskGen, Params, Task, Utils -from Params import debug, fatal -import ccroot # <- do not remove -from TaskGen import taskgen, before, extension - -g_cpp_flag_vars = [ -'FRAMEWORK', 'FRAMEWORKPATH', -'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH', -'INCLUDE', -'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES'] -"main cpp variables" - -EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++'] -CXX_METHS = ['init_cxx', 'apply_type_vars', 'apply_incpaths', 'apply_defines_cxx', -'apply_core', 'apply_lib_vars', 'apply_obj_vars_cxx'] - -TaskGen.add_feature('cxx', CXX_METHS) - -g_cpp_type_vars=['CXXFLAGS', 'LINKFLAGS'] -class cpp_taskgen(ccroot.ccroot_abstract): - def __init__(self, *k): - ccroot.ccroot_abstract.__init__(self, *k) - - # it is called cpp for backward compatibility, in fact it is cxx - self.features[0] = 'cxx' - -def init_cxx(self): - if not 'cc' in self.features: - self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx'] - - if hasattr(self, 'p_flag_vars'): self.p_flag_vars = set(self.p_flag_vars).union(g_cpp_flag_vars) - else: self.p_flag_vars = g_cpp_flag_vars - - if hasattr(self, 'p_type_vars'): self.p_type_vars = set(self.p_type_vars).union(g_cpp_type_vars) - else: self.p_type_vars = g_cpp_type_vars - -def apply_obj_vars_cxx(self): - debug('apply_obj_vars_cxx', 'ccroot') - env = self.env - app = env.append_unique - cxxpath_st = env['CPPPATH_ST'] - - # local flags come first - # set the user-defined includes paths - for i in env['INC_PATHS']: - app('_CXXINCFLAGS', cxxpath_st % i.bldpath(env)) - app('_CXXINCFLAGS', cxxpath_st % i.srcpath(env)) - - # set the library include paths - for i in env['CPPPATH']: - app('_CXXINCFLAGS', cxxpath_st % i) - #print self.env['_CXXINCFLAGS'] - #print " appending include ",i - - # this is usually a good idea - app('_CXXINCFLAGS', cxxpath_st % '.') - app('_CXXINCFLAGS', cxxpath_st % self.env.variant()) - tmpnode = self.path - app('_CXXINCFLAGS', cxxpath_st % tmpnode.bldpath(env)) - app('_CXXINCFLAGS', cxxpath_st % tmpnode.srcpath(env)) - -def apply_defines_cxx(self): - tree = Params.g_build - self.defines = getattr(self, 'defines', []) - lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES']) - milst = [] - - # now process the local defines - for defi in lst: - if not defi in milst: - milst.append(defi) - - # CXXDEFINES_USELIB - libs = self.to_list(self.uselib) - for l in libs: - val = self.env['CXXDEFINES_'+l] - if val: milst += self.to_list(val) - - self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]] - y = self.env['CXXDEFINES_ST'] - self.env['_CXXDEFFLAGS'] = [y%x for x in milst] - -def cxx_hook(self, node): - # create the compilation task: cpp or cc - task = self.create_task('cxx', self.env) - try: obj_ext = self.obj_ext - except AttributeError: obj_ext = '_%d.o' % self.idx - - task.m_scanner = ccroot.g_c_scanner - task.defines = self.scanner_defines - - task.m_inputs = [node] - task.m_outputs = [node.change_ext(obj_ext)] - self.compiled_tasks.append(task) - -cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}' -link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT} ${LINKFLAGS} ${_LIBDIRFLAGS} ${_LIBFLAGS}' - -Task.simple_task_type('cxx', cxx_str, color='GREEN', prio=100) -Task.simple_task_type('cxx_link', link_str, color='YELLOW', prio=111) - 
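# Annotation: the apply_obj_vars_cxx/apply_defines_cxx methods removed
# above expand printf-style templates (CPPPATH_ST = '-I%s',
# CXXDEFINES_ST = '-D%s') into compiler flags, de-duplicating defines
# along the way. A minimal standalone sketch of that expansion;
# expand_cxx_flags is a hypothetical helper, not waf API:
def expand_cxx_flags(inc_paths, defines, inc_st='-I%s', def_st='-D%s'):
    flags = [inc_st % p for p in inc_paths]
    seen = []
    for d in defines:
        # keep only the first occurrence, preserving order (like milst above)
        if d not in seen:
            seen.append(d)
    return flags + [def_st % d for d in seen]

# e.g. expand_cxx_flags(['.', 'include'], ['PIC', 'DEBUG', 'PIC'])
# returns ['-I.', '-Iinclude', '-DPIC', '-DDEBUG']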
-TaskGen.declare_order('apply_incpaths', 'apply_defines_cxx', 'apply_core', 'apply_lib_vars', 'apply_obj_vars_cxx', 'apply_obj_vars') - - -taskgen(init_cxx) -before('apply_type_vars')(init_cxx) -taskgen(apply_obj_vars_cxx) -taskgen(apply_defines_cxx) -extension(EXT_CXX)(cxx_hook) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/dmd.py a2jmidid-9/wafadmin/Tools/dmd.py --- a2jmidid-8~dfsg0/wafadmin/Tools/dmd.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/dmd.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2007 (dv) -# Thomas Nagy, 2008 (ita) - -import sys -import ar - -def find_dmd(conf): - v = conf.env - d_compiler = None - if v['D_COMPILER']: - d_compiler = v['D_COMPILER'] - if not d_compiler: d_compiler = conf.find_program('dmd', var='D_COMPILER') - if not d_compiler: return 0 - v['D_COMPILER'] = d_compiler - -def common_flags(conf): - v = conf.env - - # _DFLAGS _DIMPORTFLAGS _DLIBDIRFLAGS _DLIBFLAGS - - # Compiler is dmd so 'gdc' part will be ignored, just - # ensure key is there, so wscript can append flags to it - v['DFLAGS'] = {'gdc': [], 'dmd': ['-version=Posix']} - - v['D_SRC_F'] = '' - v['D_TGT_F'] = '-c -of' - v['DPATH_ST'] = '-I%s' # template for adding import paths - - # linker - v['D_LINKER'] = v['D_COMPILER'] - v['DLNK_SRC_F'] = '' - v['DLNK_TGT_F'] = '-of' - - v['DLIB_ST'] = '-L-l%s' # template for adding libs - v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths - - # linker debug levels - v['DFLAGS_OPTIMIZED'] = ['-O'] - v['DFLAGS_DEBUG'] = ['-g', '-debug'] - v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug'] - v['DLINKFLAGS'] = ['-quiet'] - - v['D_shlib_DFLAGS'] = ['-fPIC'] - v['D_shlib_LINKFLAGS'] = ['-L-shared'] - - v['DHEADER_ext'] = '.di' - v['D_HDR_F'] = '-H -Hf' - - if sys.platform == "win32": - v['D_program_PATTERN'] = '%s.exe' - v['D_shlib_PATTERN'] = 'lib%s.dll' - v['D_staticlib_PATTERN'] = 'lib%s.a' - else: - v['D_program_PATTERN'] = '%s' - v['D_shlib_PATTERN'] = 'lib%s.so' - v['D_staticlib_PATTERN'] = 'lib%s.a' - -def detect(conf): - v = conf.env - find_dmd(conf) - ar.find_ar(conf) - conf.check_tool('d') - common_flags(conf) - -def set_options(opt): - pass - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/d.py a2jmidid-9/wafadmin/Tools/d.py --- a2jmidid-8~dfsg0/wafadmin/Tools/d.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/d.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,529 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2007 (dv) -# Thomas Nagy, 2007-2008 (ita) - -import os, sys, re, optparse -import ccroot # <- leave this -import TaskGen, Utils, Task, Params, checks, Configure, Scan -from Params import debug, error -from TaskGen import taskgen, feature, after, before, extension - -EXT_D = ['.d', '.di', '.D'] -D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods - -def filter_comments(filename): - f = open(filename, 'r') - txt = f.read() - f.close() - buf = [] - - i = 0 - max = len(txt) - while i < max: - c = txt[i] - # skip a string - if c == '"': - i += 1 - c = '' - while i < max: - p = c - c = txt[i] - i += 1 - if i == max: return buf - if c == '"': - cnt = 0 - while i < cnt and i < max: - #print "cntcnt = ", str(cnt), self.txt[self.i-2-cnt] - if txt[i-2-cnt] == '\\': cnt+=1 - else: break - #print "cnt is ", str(cnt) - if (cnt%2)==0: break - # i -= 1 # <- useless in practice - # skip a char - elif c == "'": - i += 1 - if i == max: return buf - c = txt[i] - if c == '\\': - i += 1 - if i == max: return buf - c = txt[i] - if c == 'x': - i += 2 # skip two chars - elif c == 'u': - i += 4 # skip unicode chars - i += 1 - if i == max: return buf - c = txt[i] - if c != '\'': error("uh-oh, invalid character") - - # skip a comment - elif c == '/': - if i == max: break - c = txt[i+1] - # eat /+ +/ comments - if c == '+': - i += 1 - nesting = 1 - prev = 0 - while i < max: - c = txt[i] - if c == '+': - prev = 1 - elif c == '/': - if prev: - nesting -= 1 - if nesting == 0: break - else: - if i < max: - i += 1 - c = txt[i] - if c == '+': - nesting += 1 - else: - return buf - else: - prev = 0 - i += 1 - # eat /* */ comments - elif c == '*': - i += 1 - while i < max: - c = txt[i] - if c == '*': - prev = 1 - elif c == '/': - if prev: break - else: - prev = 0 - i += 1 - # eat // comments - elif c == '/': - i += 1 - c = txt[i] - while i < max and c != '\n': - i += 1 - c = txt[i] - # a valid char, add it to the buffer - else: - buf.append(c) - i += 1 - return buf - -class d_parser(object): - def __init__(self, env, incpaths): - #self.code = '' - #self.module = '' - #self.imports = [] - - self.allnames = [] - - self.re_module = re.compile("module\s+([^;]+)") - self.re_import = re.compile("import\s+([^;]+)") - self.re_import_bindings = re.compile("([^:]+):(.*)") - self.re_import_alias = re.compile("[^=]+=(.+)") - - self.env = env - - self.m_nodes = [] - self.m_names = [] - - self.incpaths = incpaths - - def tryfind(self, filename): - found = 0 - for n in self.incpaths: - found = n.find_resource(filename.replace('.', '/') + '.d') - if found: - self.m_nodes.append(found) - self.waiting.append(found) - break - if not found: - if not filename in self.m_names: - self.m_names.append(filename) - - def get_strings(self, code): - #self.imports = [] - self.module = '' - lst = [] - - # get the module name (if present) - - mod_name = self.re_module.search(code) - if mod_name: - self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces - - # go through the code, have a look at all import occurrences - - # first, lets look at anything beginning with "import" and ending with ";" - import_iterator = self.re_import.finditer(code) - if import_iterator: - for import_match in import_iterator: - import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces - - # does this end with an import bindings declaration? 
- # (import bindings always terminate the list of imports) - bindings_match = self.re_import_bindings.match(import_match_str) - if bindings_match: - import_match_str = bindings_match.group(1) - # if so, extract the part before the ":" (since the module declaration(s) is/are located there) - - # split the matching string into a bunch of strings, separated by a comma - matches = import_match_str.split(',') - - for match in matches: - alias_match = self.re_import_alias.match(match) - if alias_match: - # is this an alias declaration? (alias = module name) if so, extract the module name - match = alias_match.group(1) - - lst.append(match) - return lst - - def start(self, node): - self.waiting = [node] - # while the stack is not empty, add the dependencies - while self.waiting: - nd = self.waiting.pop(0) - self.iter(nd) - - def iter(self, node): - path = node.abspath(self.env) # obtain the absolute path - code = "".join(filter_comments(path)) # read the file and filter the comments - names = self.get_strings(code) # obtain the import strings - for x in names: - # optimization - if x in self.allnames: continue - self.allnames.append(x) - - # for each name, see if it is like a node or not - self.tryfind(x) - -class d_scanner(Scan.scanner): - "scanner for d files" - def __init__(self): - Scan.scanner.__init__(self) - - def scan(self, task, node): - "look for .d/.di the .d source need" - debug("_scan_preprocessor(self, node, env, path_lst)", 'ccroot') - gruik = d_parser(task.env(), task.env()['INC_PATHS']) - gruik.start(node) - - if Params.g_verbose: - debug("nodes found for %s: %s %s" % (str(node), str(gruik.m_nodes), str(gruik.m_names)), 'deps') - #debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps') - return (gruik.m_nodes, gruik.m_names) - -g_d_scanner = d_scanner() -"scanner for d programs" - -def get_target_name(self): - "for d programs and libs" - v = self.env - return v['D_%s_PATTERN' % self.m_type] % self.target - -d_params = { -'dflags': {'gdc':'', 'dmd':''}, -'importpaths':'', -'libs':'', -'libpaths':'', -'generate_headers':False, -} - -def init_d(self): - for x in d_params: - setattr(self, x, getattr(self, x, d_params[x])) - -class d_taskgen(TaskGen.task_gen): - def __init__(self, *k): - TaskGen.task_gen.__init__(self, *k) - - # TODO m_type is obsolete - if len(k)>1: self.m_type = k[1] - else: self.m_type = '' - if self.m_type: - self.features.append('d' + self.m_type) - - self.dflags = {'gdc':'', 'dmd':''} - self.importpaths = '' - self.libs = '' - self.libpaths = '' - self.uselib = '' - self.uselib_local = '' - - self.generate_headers = False # set to true if you want .di files as well as .o - - self.compiled_tasks = [] - - self.add_objects = [] - - self.vnum = '1.0.0' - -TaskGen.add_feature('d', D_METHS) - -def apply_d_libs(self): - uselib = self.to_list(self.uselib) - seen = [] - local_libs = self.to_list(self.uselib_local) - libs = [] - libpaths = [] - env = self.env - while local_libs: - x = local_libs.pop() - - # visit dependencies only once - if x in seen: - continue - else: - seen.append(x) - - # object does not exist ? - y = TaskGen.name_to_obj(x) - if not y: - fatal('object not found in uselib_local: obj %s uselib %s' % (self.name, x)) - - # object has ancestors to process first ? 
update the list of names - if y.uselib_local: - added = 0 - lst = y.to_list(y.uselib_local) - lst.reverse() - for u in lst: - if u in seen: continue - added = 1 - local_libs = [u]+local_libs - if added: continue # list of names modified, loop - - # safe to process the current object - if not y.m_posted: y.post() - seen.append(x) - - if 'dshlib' in y.features or 'dstaticlib' in y.features: - libs.append(y.target) - - # add the link path too - tmp_path = y.path.bldpath(env) - if not tmp_path in libpaths: libpaths = [tmp_path] + libpaths - - # set the dependency over the link task - if y.link_task is not None: - self.link_task.set_run_after(y.link_task) - dep_nodes = getattr(self.link_task, 'dep_nodes', []) - self.link_task.dep_nodes = dep_nodes + y.link_task.m_outputs - - # add ancestors uselib too - # TODO potential problems with static libraries ? - morelibs = y.to_list(y.uselib) - for v in morelibs: - if v in uselib: continue - uselib = [v]+uselib - self.uselib = uselib - -def apply_d_link(self): - link = getattr(self, 'link', None) - if not link: - if 'dstaticlib' in self.features: link = 'ar_link_static' - else: link = 'd_link' - linktask = self.create_task(link, self.env) - outputs = [t.m_outputs[0] for t in self.compiled_tasks] - linktask.set_inputs(outputs) - linktask.set_outputs(self.path.find_or_declare(get_target_name(self))) - - self.link_task = linktask - -def apply_d_vars(self): - env = self.env - dpath_st = env['DPATH_ST'] - lib_st = env['DLIB_ST'] - libpath_st = env['DLIBPATH_ST'] - - dflags = {'gdc':[], 'dmd':[]} - importpaths = self.to_list(self.importpaths) - libpaths = [] - libs = [] - uselib = self.to_list(self.uselib) - - # add compiler flags - for i in uselib: - if env['DFLAGS_' + i]: - for dflag in self.to_list(env['DFLAGS_' + i][env['COMPILER_D']]): - if not dflag in dflags[env['COMPILER_D']]: - dflags[env['COMPILER_D']] += [dflag] - dflags[env['COMPILER_D']] = self.to_list(self.dflags[env['COMPILER_D']]) + dflags[env['COMPILER_D']] - - for dflag in dflags[env['COMPILER_D']]: - if not dflag in env['DFLAGS'][env['COMPILER_D']]: - env['DFLAGS'][env['COMPILER_D']] += [dflag] - - d_shlib_dflags = env['D_' + self.m_type + '_DFLAGS'] - if d_shlib_dflags: - for dflag in d_shlib_dflags: - if not dflag in env['DFLAGS'][env['COMPILER_D']]: - env['DFLAGS'][env['COMPILER_D']] += [dflag] - - env['_DFLAGS'] = env['DFLAGS'][env['COMPILER_D']] - - # add import paths - for i in uselib: - if env['DPATH_' + i]: - for entry in self.to_list(env['DPATH_' + i]): - if not entry in importpaths: - importpaths.append(entry) - - # now process the import paths - for path in importpaths: - if os.path.isabs(path): - env.append_unique('_DIMPORTFLAGS', dpath_st % path) - else: - node = self.path.find_dir(path) - self.env.append_unique('INC_PATHS', node) - env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env)) - env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env)) - - # add library paths - for i in uselib: - if env['LIBPATH_' + i]: - for entry in self.to_list(env['LIBPATH_' + i]): - if not entry in libpaths: - libpaths += [entry] - libpaths = self.to_list(self.libpaths) + libpaths - - # now process the library paths - for path in libpaths: - env.append_unique('_DLIBDIRFLAGS', libpath_st % path) - - # add libraries - for i in uselib: - if env['LIB_' + i]: - for entry in self.to_list(env['LIB_' + i]): - if not entry in libs: - libs += [entry] - libs = libs + self.to_list(self.libs) - - # now process the libraries - for lib in libs: - env.append_unique('_DLIBFLAGS', lib_st % lib) - 
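# Annotation: the d_parser removed earlier in this hunk discovers
# dependencies by regex-matching D 'import' statements, stripping
# import bindings (everything after ':') and resolving aliases
# ('name = module'). A compact sketch of the same extraction;
# d_imports is a hypothetical helper, not part of waf:
import re

RE_IMPORT = re.compile(r'import\s+([^;]+)')

def d_imports(code):
    mods = []
    for m in RE_IMPORT.finditer(code):
        stmt = re.sub(r'\s+', '', m.group(1))   # strip all whitespace
        stmt = stmt.split(':')[0]               # bindings end the import list
        for part in stmt.split(','):
            mods.append(part.split('=')[-1])    # 'alias=module' -> 'module'
    return mods

# e.g. d_imports('import std.stdio, cio = core.stdc.stdio;')
# returns ['std.stdio', 'core.stdc.stdio']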
- # add linker flags - for i in uselib: - dlinkflags = env['DLINKFLAGS_' + i] - if dlinkflags: - for linkflag in dlinkflags: - env.append_unique('DLINKFLAGS', linkflag) - -def add_shlib_d_flags(self): - for linkflag in self.env['D_shlib_LINKFLAGS']: - self.env.append_unique('DLINKFLAGS', linkflag) - -def d_hook(self, node): - # create the compilation task: cpp or cc - task = self.create_task(self.generate_headers and 'd_with_header' or 'd') - try: obj_ext = self.obj_ext - except AttributeError: obj_ext = '_%d.o' % self.idx - - global g_d_scanner - task.m_scanner = g_d_scanner - - task.m_inputs = [node] - task.m_outputs = [node.change_ext(obj_ext)] - self.compiled_tasks.append(task) - - if self.generate_headers: - header_node = node.change_ext(self.env['DHEADER_ext']) - task.m_outputs += [header_node] - -d_str = '${D_COMPILER} ${_DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}' -d_with_header_str = '${D_COMPILER} ${_DFLAGS} ${_DIMPORTFLAGS} \ -${D_HDR_F}${TGT[1].bldpath(env)} \ -${D_SRC_F}${SRC} \ -${D_TGT_F}${TGT[0].bldpath(env)}' -link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS} ${_DLIBDIRFLAGS} ${_DLIBFLAGS}' - -Task.simple_task_type('d', d_str, 'GREEN', prio=100) -Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', prio=100) -Task.simple_task_type('d_link', link_str, color='YELLOW', prio=111) - -# for feature request #104 -def generate_header(self, filename, inst_var, inst_dir): - if not hasattr(self, 'header_lst'): self.header_lst = [] - self.meths.add('process_header') - self.header_lst.append([filename, inst_var, inst_dir]) - -def process_header(self): - env = self.env - for i in getattr(self, 'header_lst', []): - node = self.path.find_resource(i[0]) - - if not node: - fatal('file not found on d obj '+i[0]) - - task = self.create_task('d_header', env, 2) - task.set_inputs(node) - task.set_outputs(node.change_ext('.di')) - -d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}' -Task.simple_task_type('d_header', d_header_str, color='BLUE', prio=80) - - -# quick test # -if __name__ == "__main__": - #Params.g_verbose = 2 - #Params.g_zones = ['preproc'] - #class dum: - # def __init__(self): - # self.parse_cache_d = {} - #Params.g_build = dum() - - try: arg = sys.argv[1] - except IndexError: arg = "file.d" - - print "".join(filter_comments(arg)) - # TODO - paths = ['.'] - - #gruik = filter() - #gruik.start(arg) - - #code = "".join(gruik.buf) - - #print "we have found the following code" - #print code - - #print "now parsing" - #print "-------------------------------------------" - """ - parser_ = d_parser() - parser_.start(arg) - - print "module: %s" % parser_.module - print "imports: ", - for imp in parser_.imports: - print imp + " ", - print -""" - - -taskgen(init_d) -before('apply_type_vars')(init_d) -feature('d')(init_d) -taskgen(apply_d_libs) -feature('d')(apply_d_libs) -after('apply_d_link')(apply_d_libs) -before('apply_vnum')(apply_d_libs) -taskgen(apply_d_link) -feature('dprogram', 'dshlib', 'dstaticlib')(apply_d_link) -after('apply_core')(apply_d_link) -taskgen(apply_d_vars) -feature('d')(apply_d_vars) -after('apply_core')(apply_d_vars) -taskgen(add_shlib_d_flags) -after('apply_d_vars')(add_shlib_d_flags) -feature('dshlib')(add_shlib_d_flags) -extension(EXT_D)(d_hook) -taskgen(generate_header) -taskgen(process_header) -before('apply_core')(process_header) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/flex.py a2jmidid-9/wafadmin/Tools/flex.py --- a2jmidid-8~dfsg0/wafadmin/Tools/flex.py 2008-07-21 22:39:47.000000000 +0000 +++ 
a2jmidid-9/wafadmin/Tools/flex.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# John O'Meara, 2006 -# Thomas Nagy, 2006-2008 - -"Flex processing" - -import TaskGen - -def decide_ext(self, node): - if 'cxx' in self.features: return '.lex.cc' - else: return '.lex.c' - -TaskGen.declare_chain( - name = 'flex', - action = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}', - ext_in = '.l', - decider = decide_ext, -) - -def detect(conf): - flex = conf.find_program('flex', var='FLEX') - if not flex: conf.fatal("flex was not found") - v = conf.env - v['FLEXFLAGS'] = '' - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/gas.py a2jmidid-9/wafadmin/Tools/gas.py --- a2jmidid-8~dfsg0/wafadmin/Tools/gas.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/gas.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2008 (ita) - -"as and gas" - -import os, sys -import Task -from TaskGen import extension, taskgen, after, before - -EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP'] - -as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}' -Task.simple_task_type('asm', as_str, 'PINK', prio=100) - -def asm_hook(self, node): - # create the compilation task: cpp or cc - task = self.create_task('asm', self.env) - try: obj_ext = self.obj_ext - except AttributeError: obj_ext = '_%d.o' % self.idx - - task.m_inputs = [node] - task.m_outputs = [node.change_ext(obj_ext)] - self.compiled_tasks.append(task) - -def asm_incflags(self): - if self.env['ASINCFLAGS']: self.env['_ASINCFLAGS'] = self.env['ASINCFLAGS'] - if 'cxx' in self.features: self.env['_ASINCFLAGS'] = self.env['_CCINCFLAGS'] - else: self.env['_ASINCFLAGS'] = self.env['_CXXINCFLAGS'] - -def detect(conf): - comp = os.environ.get('AS', '') - if not comp: comp = conf.find_program('as', var='AS') - if not comp: comp = conf.find_program('gas', var='AS') - if not comp: comp = conf.env['CC'] - if not comp: return - - v = conf.env - v['ASFLAGS'] = '' - - -extension(EXT_ASM)(asm_hook) -taskgen(asm_incflags) -after('apply_obj_vars_cc')(asm_incflags) -after('apply_obj_vars_cxx')(asm_incflags) -before('apply_link')(asm_incflags) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/gcc.py a2jmidid-9/wafadmin/Tools/gcc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/gcc.py 2008-07-21 23:09:49.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/gcc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,178 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2008 (ita) -# Ralf Habacker, 2006 (rh) - -import os, optparse, sys -import Params, Configure -import ccroot, ar -from Configure import conftest - -def find_gcc(conf): - v = conf.env - cc = None - if v['CC']: cc = v['CC'] - elif 'CC' in os.environ: cc = os.environ['CC'] - if not cc: cc = conf.find_program('gcc', var='CC') - if not cc: cc = conf.find_program('cc', var='CC') - if not cc: conf.fatal('gcc was not found') - v['CC'] = cc - v['CC_NAME'] = 'gcc' - ccroot.get_cc_version(conf, cc, 'CC_VERSION') - -def gcc_common_flags(conf): - v = conf.env - - # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS _LIBDIRFLAGS _LIBFLAGS - - v['CC_SRC_F'] = '' - v['CC_TGT_F'] = '-c -o ' - v['CPPPATH_ST'] = '-I%s' # template for adding include paths - - # linker - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] - v['CCLNK_SRC_F'] = '' - v['CCLNK_TGT_F'] = '-o ' - - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STATICLIB_ST'] = '-l%s' - v['STATICLIBPATH_ST'] = '-L%s' - v['CCDEFINES_ST'] = '-D%s' - - v['SHLIB_MARKER'] = '-Wl,-Bdynamic' - v['STATICLIB_MARKER'] = '-Wl,-Bstatic' - - # program - v['program_PATTERN'] = '%s' - - # shared library - v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] - v['shlib_LINKFLAGS'] = ['-shared'] - v['shlib_PATTERN'] = 'lib%s.so' - - # static lib - v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic'] - v['staticlib_PATTERN'] = 'lib%s.a' - - # osx stuff - v['MACBUNDLE_LINKFLAGS'] = ['-bundle', '-undefined dynamic_lookup'] - v['MACBUNDLE_CCFLAGS'] = ['-fPIC'] - v['MACBUNDLE_PATTERN'] = '%s.bundle' - -def gcc_modifier_win32(conf): - v = conf.env - if sys.platform != 'win32': return - v['program_PATTERN'] = '%s.exe' - - v['shlib_PATTERN'] = 'lib%s.dll' - v['shlib_CCFLAGS'] = [] - - v['staticlib_LINKFLAGS'] = [] - -def gcc_modifier_cygwin(conf): - v = conf.env - if sys.platform != 'cygwin': return - v['program_PATTERN'] = '%s.exe' - - v['shlib_PATTERN'] = 'lib%s.dll' - v['shlib_CCFLAGS'] = [] - - v['staticlib_LINKFLAGS'] = [] - -def gcc_modifier_darwin(conf): - v = conf.env - if sys.platform != 'darwin': return - v['shlib_CCFLAGS'] = ['-fPIC'] - v['shlib_LINKFLAGS'] = ['-dynamiclib'] - v['shlib_PATTERN'] = 'lib%s.dylib' - - v['staticlib_LINKFLAGS'] = [] - - v['SHLIB_MARKER'] = '' - v['STATICLIB_MARKER'] = '' - -def gcc_modifier_aix5(conf): - v = conf.env - if sys.platform != 'aix5': return - v['program_LINKFLAGS'] = ['-Wl,-brtl'] - - v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull'] - - v['SHLIB_MARKER'] = '' - -def gcc_modifier_debug(conf): - v = conf.env - # compiler debug levels - if conf.check_flags('-O2'): - v['CCFLAGS_OPTIMIZED'] = ['-O2'] - v['CCFLAGS_RELEASE'] = ['-O2'] - if conf.check_flags('-g -DDEBUG'): - v['CCFLAGS_DEBUG'] = ['-g', '-DDEBUG'] - v['LINKFLAGS_DEBUG'] = ['-g'] - if conf.check_flags('-g3 -O0 -DDEBUG'): - v['CCFLAGS_ULTRADEBUG'] = ['-g3', '-O0', '-DDEBUG'] - v['LINKFLAGS_ULTRADEBUG'] = ['-g'] - if conf.check_flags('-Wall'): - for x in 'OPTIMIZED RELEASE DEBUG ULTRADEBUG'.split(): v.append_unique('CCFLAGS_'+x, '-Wall') - try: - debug_level = Params.g_options.debug_level.upper() - except AttributeError: - debug_level = ccroot.DEBUG_LEVELS.CUSTOM - v.append_value('CCFLAGS', v['CCFLAGS_'+debug_level]) - v.append_value('LINKFLAGS', v['LINKFLAGS_'+debug_level]) - -detect = ''' -find_gcc -find_cpp -find_ar -gcc_common_flags -gcc_modifier_win32 -gcc_modifier_cygwin -gcc_modifier_darwin -gcc_modifier_aix5 -cc_load_tools -cc_check_features -gcc_modifier_debug 
-cc_add_flags -''' - -""" -If you want to remove the tests you do not want, use something like this: - -conf.check_tool('gcc', funs=''' -find_gcc -find_cpp -find_ar -gcc_common_flags -gcc_modifier_win32 -gcc_modifier_cygwin -gcc_modifier_darwin -gcc_modifier_aix5 -cc_add_flags -cc_load_tools -''' -)""" - -def set_options(opt): - try: - opt.add_option('-d', '--debug-level', - action = 'store', - default = ccroot.DEBUG_LEVELS.RELEASE, - help = "Specify the debug level, does nothing if CFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), - choices = ccroot.DEBUG_LEVELS.ALL, - dest = 'debug_level') - except optparse.OptionConflictError: - pass - - -conftest(find_gcc) -conftest(gcc_common_flags) -conftest(gcc_modifier_win32) -conftest(gcc_modifier_cygwin) -conftest(gcc_modifier_darwin) -conftest(gcc_modifier_aix5) -conftest(gcc_modifier_debug) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/gdc.py a2jmidid-9/wafadmin/Tools/gdc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/gdc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/gdc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2007 (dv) - -import sys -import ar - -def find_gdc(conf): - v = conf.env - d_compiler = None - if v['D_COMPILER']: - d_compiler = v['D_COMPILER'] - if not d_compiler: d_compiler = conf.find_program('gdc', var='D_COMPILER') - if not d_compiler: return 0 - v['D_COMPILER'] = d_compiler - -def common_flags(conf): - v = conf.env - - # _DFLAGS _DIMPORTFLAGS _DLIBDIRFLAGS _DLIBFLAGS - - # for mory info about the meaning of this dict see dmd.py - v['DFLAGS'] = {'gdc':[], 'dmd':[]} - - v['D_SRC_F'] = '' - v['D_TGT_F'] = '-c -o ' - v['DPATH_ST'] = '-I%s' # template for adding import paths - - # linker - v['D_LINKER'] = v['D_COMPILER'] - v['DLNK_SRC_F'] = '' - v['DLNK_TGT_F'] = '-o ' - - v['DLIB_ST'] = '-l%s' # template for adding libs - v['DLIBPATH_ST'] = '-L%s' # template for adding libpaths - - # debug levels - v['DLINKFLAGS'] = [] - v['DFLAGS_OPTIMIZED'] = ['-O3'] - v['DFLAGS_DEBUG'] = ['-O0'] - v['DFLAGS_ULTRADEBUG'] = ['-O0'] - - v['D_shlib_DFLAGS'] = [] - v['D_shlib_LINKFLAGS'] = ['-shared'] - - v['DHEADER_ext'] = '.di' - v['D_HDR_F'] = '-fintfc -fintfc-file=' - - if sys.platform == "win32": - v['D_program_PATTERN'] = '%s.exe' - v['D_shlib_PATTERN'] = 'lib%s.dll' - v['D_staticlib_PATTERN'] = 'lib%s.a' - else: - v['D_program_PATTERN'] = '%s' - v['D_shlib_PATTERN'] = 'lib%s.so' - v['D_staticlib_PATTERN'] = 'lib%s.a' - -def detect(conf): - v = conf.env - find_gdc(conf) - ar.find_ar(conf) - conf.check_tool('d') - common_flags(conf) - -def set_options(opt): - pass - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/gnome.py a2jmidid-9/wafadmin/Tools/gnome.py --- a2jmidid-8~dfsg0/wafadmin/Tools/gnome.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/gnome.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,419 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2008 (ita) - -"Gnome support" - -import os, re -import TaskGen, Params, Common, Scan, Utils, Runner, Task -import cc -from Params import fatal, error -from TaskGen import taskgen, before, after, feature - -n1_regexp = re.compile('(.*)', re.M) -n2_regexp = re.compile('(.*)', re.M) - -def postinstall_schemas(prog_name): - if Params.g_commands['install']: - dir = Common.path_install('PREFIX', 'etc/gconf/schemas/%s.schemas' % prog_name) - if not Params.g_options.destdir: - # add the gconf schema - Params.pprint('YELLOW', "Installing GConf schema.") - command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir - ret = Runner.exec_command(command) - else: - Params.pprint('YELLOW', "GConf schema not installed. After install, run this:") - Params.pprint('YELLOW', "gconftool-2 --install-schema-file=%s" % dir) - -def postinstall_icons(): - dir = Common.path_install('DATADIR', 'icons/hicolor') - if Params.g_commands['install']: - if not Params.g_options.destdir: - # update the pixmap cache directory - Params.pprint('YELLOW', "Updating Gtk icon cache.") - command = 'gtk-update-icon-cache -q -f -t %s' % dir - ret = Runner.exec_command(command) - else: - Params.pprint('YELLOW', "Icon cache not updated. After install, run this:") - Params.pprint('YELLOW', "gtk-update-icon-cache -q -f -t %s" % dir) - -def postinstall_scrollkeeper(prog_name): - if Params.g_commands['install']: - # now the scrollkeeper update if we can write to the log file - try: - dir1 = Common.path_install('PREFIX', 'var/scrollkeeper') - dir2 = Common.path_install('DATADIR', 'omf/%s' % prog_name) - command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2) - ret = Runner.exec_command(command) - except: - pass - -def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1): - if schemas: postinstall_schemas(prog_name) - if icons: postinstall_icons() - if scrollkeeper: postinstall_scrollkeeper(prog_name) - -class gnome_doc_taskgen(TaskGen.task_gen): - def __init__(self, *k): - TaskGen.task_gen.__init__(self, *k) - self.inst_var_default = 'PREFIX' - self.inst_dir_default = 'share' - - def apply(self): - self.env['APPNAME'] = self.doc_module - lst = self.to_list(self.doc_linguas) - for x in lst: - tsk = self.create_task('xml2po', self.env) - node = self.path.find_resource(x+'/'+x+'.po') - src = self.path.find_resource('C/%s.xml' % self.doc_module) - out = self.path.find_build('%s/%s.xml' % (x, self.doc_module)) - tsk.set_inputs([node, src]) - tsk.set_outputs(out) - - tsk2 = self.create_task('xsltproc2po', self.env) - out2 = self.path.find_build('%s/%s-%s.omf' % (x, self.doc_module, x)) - tsk2.set_outputs(out2) - node = self.path.find_resource(self.doc_module+".omf.in") - tsk2.m_inputs = [node, out] - - tsk2.m_run_after.append(tsk) - - - if Params.g_install: - inst_dir = self.inst_dir + 'gnome/help/%s/%s' % (self.doc_module, x) - Common.install_files(self.inst_var, self.inst_dir + "omf", out2.abspath(self.env)) - for y in self.to_list(self.doc_figures): - try: - os.stat(self.path.abspath()+'/'+x+'/'+y) - Common.install_as(self.inst_var, inst_dir+'/'+y, self.path.abspath()+'/'+x+'/'+y) - except: - Common.install_as(self.inst_var, inst_dir+'/'+y, self.path.abspath()+'/C/'+y) - Common.install_as(self.inst_var, inst_dir + '/%s.xml' % self.doc_module, out.abspath(self.env)) - -# give specs -class xml_to_taskgen(TaskGen.task_gen): - def __init__(self): - TaskGen.task_gen(self) - self.source = 'xmlfile' - self.xslt = 'xlsltfile' - self.target = 'hey' - 
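# Annotation: postinstall_icons() above shells out to
# gtk-update-icon-cache only for a real (non-destdir) install and
# otherwise just prints the command for the packager to run later.
# A minimal sketch of that pattern, assuming gtk-update-icon-cache is
# on PATH; update_icon_cache is a hypothetical helper, not waf API:
import subprocess

def update_icon_cache(icon_dir, destdir=''):
    if destdir:
        # staged install: touching the live cache would be wrong
        print('Icon cache not updated. After install, run this:')
        print('gtk-update-icon-cache -q -f -t %s' % icon_dir)
        return
    subprocess.call(['gtk-update-icon-cache', '-q', '-f', '-t', icon_dir])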
self.inst_var_default = 'PREFIX' - self.inst_dir_default = '' - self.task_created = None - def apply(self): - self.env = self.env.copy() - tree = Params.g_build - xmlfile = self.path.find_resource(self.source) - xsltfile = self.path.find_resource(self.xslt) - tsk = self.create_task('xmlto', self.env, 6) - tsk.set_inputs([xmlfile, xsltfile]) - tsk.set_outputs(xmlfile.change_ext('html')) - tsk.install = {'var':self.inst_var, 'dir':self.inst_dir} - -class sgml_man_scanner(Scan.scanner): - def __init__(self): - Scan.scanner.__init__(self) - def scan(self, task, node): - env = task.env() - variant = node.variant(env) - - fi = open(node.abspath(env), 'r') - content = fi.read() - fi.close() - - name = n1_regexp.findall(content)[0] - num = n2_regexp.findall(content)[0] - - doc_name = name+'.'+num - return ([], [doc_name]) - - def do_scan(self, task, node): - Scan.scanner.do_scan(self, task, node) - - variant = node.variant(task.env()) - tmp_lst = Params.g_build.raw_deps[variant][node.id] - name = tmp_lst[0] - task.set_outputs(task.task_generator.path.find_build(name)) - -sgml_scanner = sgml_man_scanner() - -class gnome_sgml2man_taskgen(TaskGen.task_gen): - def __init__(self, *k, **kw): - TaskGen.task_gen.__init__(self) - self.m_tasks = [] - self.m_appname = k[0] # the first argument is the appname - will disappear - def apply(self): - - def install_result(task): - out = task.m_outputs[0] - name = out.m_name - ext = name[-1] - env = task.env() - Common.install_files('DATADIR', 'man/man%s/' % ext, out.abspath(env), env) - - tree = Params.g_build - tree.rescan(self.path) - for name in Params.g_build.cache_dir_contents[self.path.id]: - base, ext = os.path.splitext(name) - if ext != '.sgml': continue - - task = self.create_task('sgml2man', self.env, 2) - task.set_inputs(self.path.find_resource(name)) - task.task_generator = self - if Params.g_install: task.install = install_results - # no outputs, the scanner does it - # no caching for now, this is not a time-critical feature - # in the future the scanner can be used to do more things (find dependencies, etc) - sgml_scanner.do_scan(task, task.m_inputs[0]) - -# Unlike the sgml and doc processing, the dbus and marshal beast -# generate c/c++ code that we want to mix -# here we attach new methods to TaskGen.task_gen - -def add_marshal_file(self, filename, prefix, mode): - if not hasattr(self, 'marshal_lst'): self.marshal_lst = [] - self.meths.add('process_marshal') - self.marshal_lst.append([filename, prefix, mode]) - -def process_marshal(self): - for i in getattr(self, 'marshal_lst', []): - env = self.env.copy() - node = self.path.find_resource(i[0]) - - if not node: - fatal('file not found on gnome obj '+i[0]) - - if i[2] == '--header': - - env['GGM_PREFIX'] = i[1] - env['GGM_MODE'] = i[2] - - task = self.create_task('glib_genmarshal', env, 2) - task.set_inputs(node) - task.set_outputs(node.change_ext('.h')) - - elif i[2] == '--body': - env['GGM_PREFIX'] = i[1] - env['GGM_MODE'] = i[2] - - # the c file generated will be processed too - outnode = node.change_ext('.c') - self.allnodes.append(outnode) - - task = self.create_task('glib_genmarshal', env, 2) - task.set_inputs(node) - task.set_outputs(node.change_ext('.c')) - else: - error("unknown type for marshal "+i[2]) - -def add_dbus_file(self, filename, prefix, mode): - if not hasattr(self, 'dbus_lst'): self.dbus_lst = [] - self.meths.add('process_dbus') - self.dbus_lst.append([filename, prefix, mode]) - -def process_dbus(self): - for i in getattr(self, 'dbus_lst', []): - env = self.env.copy() - node = 
self.path.find_resource(i[0]) - - if not node: - fatal('file not found on gnome obj '+i[0]) - - env['DBT_PREFIX'] = i[1] - env['DBT_MODE'] = i[2] - - task = self.create_task('dbus_binding_tool', env, 2) - task.set_inputs(node) - task.set_outputs(node.change_ext('.h')) - -def process_enums(self): - for x in getattr(self, 'mk_enums', []): - # temporary - env = self.env.copy() - task = self.create_task('mk_enums', env) - inputs = [] - - # process the source - src_lst = self.to_list(x['source']) - if not src_lst: - Params.fatal('missing source '+str(x)) - src_lst = [self.path.find_resource(k) for k in src_lst] - inputs += src_lst - env['MK_SOURCE'] = [k.abspath(env) for k in src_lst] - - # find the target - if not x['target']: - Params.fatal('missing target '+str(x)) - tgt_node = self.path.find_build(x['target']) - if tgt_node.m_name.endswith('.c'): - self.allnodes.append(tgt_node) - env['MK_TARGET'] = tgt_node.abspath(env) - - # template, if provided - if x['template']: - template_node = self.path.find_resource(x['template']) - env['MK_TEMPLATE'] = '--template %s' % (template_node.abspath(env)) - inputs.append(template_node) - - # update the task instance - task.set_inputs(inputs) - task.set_outputs(tgt_node) - -def add_glib_mkenum(self, source='', template='', target=''): - "just a helper" - if not hasattr(self, 'mk_enums'): self.mk_enums = [] - self.meths.add('process_enums') - self.mk_enums.append({'source':source, 'template':template, 'target':target}) - - -Task.simple_task_type('mk_enums', '${GLIB_MKENUM} ${MK_TEMPLATE} ${MK_SOURCE} > ${MK_TARGET}', 'PINK', prio=30) - -Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE') - -Task.simple_task_type('glib_genmarshal', - '${GGM} ${SRC} --prefix=${GGM_PREFIX} ${GGM_MODE} > ${TGT}', - color='BLUE') - -Task.simple_task_type('dbus_binding_tool', - '${DBT} --prefix=${DBT_PREFIX} --mode=${DBT_MODE} --output=${TGT} ${SRC}', - color='BLUE') - -Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}') - -Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE') - -# how do you expect someone to understand this?! 
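# Annotation, answering the comment above: the xsltproc2po rule that
# follows is one long command line made of repeated
# '--stringparam name value' pairs. Building the argv from a dict
# makes the shape easier to see; xsltproc_args is a hypothetical
# helper, not waf API:
def xsltproc_args(output, stylesheet, source, stringparams):
    argv = ['xsltproc', '-o', output]
    for name, value in sorted(stringparams.items()):
        argv += ['--stringparam', name, value]   # one pair per parameter
    return argv + [stylesheet, source]

# e.g. xsltproc_args('doc.omf', 'db2omf.xsl', 'doc.xml',
#                    {'db2omf.format': 'docbook', 'db2omf.lang': 'C'})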
-xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \ ---stringparam db2omf.basename ${APPNAME} \ ---stringparam db2omf.format docbook \ ---stringparam db2omf.lang C \ ---stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \ ---stringparam db2omf.omf_dir ${PREFIX}/share/omf \ ---stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \ ---stringparam db2omf.omf_in ${SRC[0].abspath(env)} \ ---stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \ -${DB2OMF} ${SRC[1].abspath(env)}""" - -#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \ -Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE') - -def detect(conf): - - conf.check_tool('checks') - - sgml2man = conf.find_program('docbook2man', var='SGML2MAN') - glib_genmarshal = conf.find_program('glib-genmarshal', var='GGM') - dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBT') - mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUM') - - def getstr(varname): - return getattr(Params.g_options, varname, '') - - prefix = conf.env['PREFIX'] - datadir = getstr('datadir') - libdir = getstr('libdir') - sysconfdir = getstr('sysconfdir') - localstatedir = getstr('localstatedir') - if not datadir: datadir = os.path.join(prefix,'share') - if not libdir: libdir = os.path.join(prefix,'lib') - if not sysconfdir: - if os.path.normpath(prefix) == '/usr': - sysconfdir = '/etc' - else: - sysconfdir = os.path.join(prefix, 'etc') - if not localstatedir: - if os.path.normpath(prefix) == '/usr': - localstatedir = '/var' - else: - localstatedir = os.path.join(prefix, 'var') - - # addefine also sets the variable to the env - conf.define('GNOMELOCALEDIR', os.path.join(datadir, 'locale')) - conf.define('DATADIR', datadir) - conf.define('LIBDIR', libdir) - conf.define('SYSCONFDIR', sysconfdir) - conf.define('LOCALSTATEDIR', localstatedir) - - xml2po = conf.find_program('xml2po', var='XML2PO') - xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO') - conf.env['XML2POFLAGS'] = '-e -p' - conf.env['SCROLLKEEPER_DATADIR'] = os.popen("scrollkeeper-config --pkgdatadir").read().strip() - conf.env['DB2OMF'] = os.popen("/usr/bin/pkg-config --variable db2omf gnome-doc-utils").read().strip() - - # TODO: maybe the following checks should be in a more generic module. - - #always defined to indicate that i18n is enabled */ - conf.define('ENABLE_NLS', 1) - - # TODO - #Define to 1 if you have the `bind_textdomain_codeset' function. - conf.define('HAVE_BIND_TEXTDOMAIN_CODESET', 1) - - # TODO - #Define to 1 if you have the `dcgettext' function. - conf.define('HAVE_DCGETTEXT', 1) - - #Define to 1 if you have the header file. - conf.check_header('dlfcn.h', 'HAVE_DLFCN_H') - - # TODO - #Define if the GNU gettext() function is already present or preinstalled. - conf.define('HAVE_GETTEXT', 1) - - #Define to 1 if you have the header file. - conf.check_header('inttypes.h', 'HAVE_INTTYPES_H') - - # TODO FIXME - #Define if your file defines LC_MESSAGES. - #conf.add_define('HAVE_LC_MESSAGES', '1') - - #Define to 1 if you have the header file. - conf.check_header('locale.h', 'HAVE_LOCALE_H') - - #Define to 1 if you have the header file. - conf.check_header('memory.h', 'HAVE_MEMORY_H') - - #Define to 1 if you have the header file. - conf.check_header('stdint.h', 'HAVE_STDINT_H') - - #Define to 1 if you have the header file. - conf.check_header('stdlib.h', 'HAVE_STDLIB_H') - - #Define to 1 if you have the header file. 
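# Annotation: the conf.check_header() probes above and below reduce to
# "does a one-line #include compile?". A standalone sketch of that
# probe, assuming a C compiler named 'cc' on PATH; have_header is a
# hypothetical helper, not waf API:
import os, subprocess, tempfile

def have_header(name, cc='cc'):
    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, 'probe.c')
        with open(src, 'w') as f:
            f.write('#include <%s>\nint main(void) { return 0; }\n' % name)
        ret = subprocess.run([cc, '-c', src, '-o', src + '.o'],
                             capture_output=True).returncode
    return ret == 0   # e.g. have_header('locale.h') backs HAVE_LOCALE_H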
- conf.check_header('strings.h', 'HAVE_STRINGS_H') - - #Define to 1 if you have the header file. - conf.check_header('string.h', 'HAVE_STRING_H') - - #Define to 1 if you have the header file. - conf.check_header('sys/stat.h', 'HAVE_SYS_STAT_H') - - #Define to 1 if you have the header file. - conf.check_header('sys/types.h', 'HAVE_SYS_TYPES_H') - - #Define to 1 if you have the header file. - conf.check_header('unistd.h', 'HAVE_UNISTD_H') - -def set_options(opt): - try: - # we do not know yet - opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') - except Exception: - pass - - for i in "execprefix datadir libdir sysconfdir localstatedir".split(): - opt.add_option('--'+i, type='string', default='', dest=i) - - -taskgen(add_marshal_file) -taskgen(process_marshal) -before('apply_core')(process_marshal) -taskgen(add_dbus_file) -taskgen(process_dbus) -before('apply_core')(process_dbus) -taskgen(process_enums) -before('apply_core')(process_enums) -taskgen(add_glib_mkenum) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/gnu_dirs.py a2jmidid-9/wafadmin/Tools/gnu_dirs.py --- a2jmidid-8~dfsg0/wafadmin/Tools/gnu_dirs.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/gnu_dirs.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,167 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Ali Sabil, 2007 - -"""Add options for the standard GNU directories, this tool will add the options -found in autotools, and will update the environment with the following -installation variables: - - * PREFIX : architecture-independent files [/usr/local] - * EXEC_PREFIX : architecture-dependent files [PREFIX] - * BINDIR : user executables [EXEC_PREFIX/bin] - * SBINDIR : user executables [EXEC_PREFIX/sbin] - * LIBEXECDIR : program executables [EXEC_PREFIX/libexec] - * SYSCONFDIR : read-only single-machine data [PREFIX/etc] - * SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com] - * LOCALSTATEDIR : modifiable single-machine data [PREFIX/var] - * LIBDIR : object code libraries [EXEC_PREFIX/lib] - * INCLUDEDIR : C header files [PREFIX/include] - * OLDINCLUDEDIR : C header files for non-gcc [/usr/include] - * DATAROOTDIR : read-only arch.-independent data root [PREFIX/share] - * DATADIR : read-only architecture-independent data [DATAROOTDIR] - * INFODIR : info documentation [DATAROOTDIR/info] - * LOCALEDIR : locale-dependent data [DATAROOTDIR/locale] - * MANDIR : man documentation [DATAROOTDIR/man] - * DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib] - * HTMLDIR : html documentation [DOCDIR] - * DVIDIR : dvi documentation [DOCDIR] - * PDFDIR : pdf documentation [DOCDIR] - * PSDIR : ps documentation [DOCDIR] -""" - -import os.path, re -import Params, Utils - -APPNAME = Utils.g_module.APPNAME -VERSION = Utils.g_module.VERSION - -_options = ( - ('bindir', 'user executables', '$(EXEC_PREFIX)/bin'), - ('sbindir', 'system admin executables', '$(EXEC_PREFIX)/sbin'), - ('libexecdir', 'program executables', '$(EXEC_PREFIX)/libexec'), - ('sysconfdir', 'read-only single-machine data', '$(PREFIX)/etc'), - ('sharedstatedir', 'modifiable architecture-independent data', '$(PREFIX)/com'), - ('localstatedir', 'modifiable single-machine data', '$(PREFIX)/var'), - ('libdir', 'object code libraries', '$(EXEC_PREFIX)/lib'), - ('includedir', 'C header files', '$(PREFIX)/include'), - ('oldincludedir', 'C header files for non-gcc', '/usr/include'), - ('datarootdir', 'read-only arch.-independent data root', 
'$(PREFIX)/share'), - ('datadir', 'read-only architecture-independent data', '$(DATAROOTDIR)'), - ('infodir', 'info documentation', '$(DATAROOTDIR)/info'), - ('localedir', 'locale-dependent data', '$(DATAROOTDIR)/locale'), - ('mandir', 'man documentation', '$(DATAROOTDIR)/man'), - ('docdir', 'documentation root', '$(DATAROOTDIR)/doc/$(PACKAGE)'), - ('htmldir', 'html documentation', '$(DOCDIR)'), - ('dvidir', 'dvi documentation', '$(DOCDIR)'), - ('pdfdir', 'pdf documentation', '$(DOCDIR)'), - ('psdir', 'ps documentation', '$(DOCDIR)'), -) - -_varprog = re.compile(r'\$(\w+|\([^)]*\))') -def _substitute_vars(path, vars): - """Substitute variables in a path""" - if '$' not in path: - return path, 0 - - i = 0 - unresolved_count = 0 - while True: - m = _varprog.search(path, i) - if m: - i, j = m.span(0) - name = m.group(1) - if name[:1] == '(' and name[-1:] == ')': - name = name[1:-1] - if name in vars: - tail = path[j:] - path = path[:i] + vars[name] - i = len(path) - path = path + tail - else: - i = j - unresolved_count += 1 - else: - break - return path, unresolved_count - -def detect(conf): - global _options, APPNAME, VERSION - - def get_param(varname): - return getattr(Params.g_options, varname, '') - - conf.env['PREFIX'] = os.path.abspath(conf.env['PREFIX']) - prefix = conf.env['PREFIX'] - - eprefix = get_param('EXEC_PREFIX') - if not eprefix: - eprefix = prefix - conf.env['EXEC_PREFIX'] = eprefix - - resolved_dirs_dict = {'PREFIX' : prefix, 'EXEC_PREFIX': eprefix, - 'APPNAME' : APPNAME, 'PACKAGE': APPNAME, 'VERSION' : VERSION} - unresolved_dirs_dict = {} - for name, help, default in _options: - name = name.upper() - value = get_param(name) - if value: - resolved_dirs_dict[name] = value - else: - unresolved_dirs_dict[name] = default - - # Resolve cross references between the variables, expanding everything - while len(unresolved_dirs_dict) > 0: - for name in unresolved_dirs_dict.keys(): - unresolved_path = unresolved_dirs_dict[name] - path, count = _substitute_vars(unresolved_path, resolved_dirs_dict) - if count == 0: - resolved_dirs_dict[name] = path - del unresolved_dirs_dict[name] - else: - unresolved_dirs_dict[name] = path - - del resolved_dirs_dict['APPNAME'] - del resolved_dirs_dict['PACKAGE'] - del resolved_dirs_dict['VERSION'] - for name, value in resolved_dirs_dict.iteritems(): - conf.env[name] = value - -def set_options(opt): - - # copied from multisync-gui-0.2X wscript - inst_dir = opt.add_option_group("Installation directories", - 'By default, waf install will install all the files in\ - "/usr/local/bin", "/usr/local/lib" etc. 
An installation prefix other\ - than "/usr/local" can be given using "--prefix",\ - for instance "--prefix=$HOME"') - - #just do some cleanups in the option list - try: - prefix_option = opt.parser.get_option("--prefix") - opt.parser.remove_option("--prefix") - destdir_option = opt.parser.get_option("--destdir") - opt.parser.remove_option("--destdir") - inst_dir.add_option(prefix_option) - inst_dir.add_option(destdir_option) - except: - pass - # end copy - - inst_dir.add_option('--exec-prefix', - help="installation prefix [Default: %s]" % 'PREFIX', - default='', - dest='EXEC_PREFIX') - - dirs_options = opt.add_option_group("Fine tuning of the installation directories", '') - - global _options - for name, help, default in _options: - option_name = '--' + name - str_default = default.replace('$(', '').replace(')', '') - str_help = '%s [Default: %s]' % (help, str_default) - dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper()) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/gob2.py a2jmidid-9/wafadmin/Tools/gob2.py --- a2jmidid-8~dfsg0/wafadmin/Tools/gob2.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/gob2.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Ali Sabil, 2007 - -import TaskGen - -TaskGen.declare_chain( - name = 'gob2', - action = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}', - ext_in = '.gob', - ext_out = '.c' -) - -def detect(conf): - gob2 = conf.find_program('gob2', var='GOB2') - if not gob2: conf.fatal('could not find the gob2 compiler') - conf.env['GOB2'] = gob2 - conf.env['GOB2FLAGS'] = '' - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/g++.py a2jmidid-9/wafadmin/Tools/g++.py --- a2jmidid-8~dfsg0/wafadmin/Tools/g++.py 2008-07-21 23:09:49.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/g++.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,162 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) -# Ralf Habacker, 2006 (rh) - -import os, optparse, sys, re -import Params, Configure -import ccroot, ar -from Configure import conftest - - -def find_gxx(conf): - v = conf.env - cc = None - if v['CXX']: cc = v['CXX'] - elif 'CXX' in os.environ: cc = os.environ['CXX'] - if not cc: cc = conf.find_program('g++', var='CXX') - if not cc: cc = conf.find_program('c++', var='CXX') - if not cc: conf.fatal('g++ was not found') - v['CXX'] = cc - v['CXX_NAME'] = 'gcc' - ccroot.get_cc_version(conf, cc, 'CXX_VERSION') - -def gxx_common_flags(conf): - v = conf.env - - # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS _LIBDIRFLAGS _LIBFLAGS - - v['CXX_SRC_F'] = '' - v['CXX_TGT_F'] = '-c -o ' - v['CPPPATH_ST'] = '-I%s' # template for adding include paths - - # linker - if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] - v['CXXLNK_SRC_F'] = '' - v['CXXLNK_TGT_F'] = '-o ' - - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STATICLIB_ST'] = '-l%s' - v['STATICLIBPATH_ST'] = '-L%s' - v['CXXDEFINES_ST'] = '-D%s' - - v['SHLIB_MARKER'] = '-Wl,-Bdynamic' - v['STATICLIB_MARKER'] = '-Wl,-Bstatic' - v['FULLSTATIC_MARKER'] = '-static' - - # program - v['program_PATTERN'] = '%s' - - # shared library - v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] - v['shlib_LINKFLAGS'] = ['-shared'] - v['shlib_PATTERN'] = 'lib%s.so' - - # static lib - v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic'] - v['staticlib_PATTERN'] = 'lib%s.a' - - # osx stuff - v['MACBUNDLE_LINKFLAGS'] = ['-bundle', '-undefined dynamic_lookup'] - v['MACBUNDLE_CCFLAGS'] = ['-fPIC'] - v['MACBUNDLE_PATTERN'] = '%s.bundle' - -def gxx_modifier_win32(conf): - if sys.platform != 'win32': return - v = conf.env - v['program_PATTERN'] = '%s.exe' - - v['shlib_PATTERN'] = 'lib%s.dll' - v['shlib_CXXFLAGS'] = [''] - - v['staticlib_LINKFLAGS'] = [''] - -def gxx_modifier_cygwin(conf): - if sys.platform != 'cygwin': return - v = conf.env - v['program_PATTERN'] = '%s.exe' - - v['shlib_PATTERN'] = 'lib%s.dll' - v['shlib_CXXFLAGS'] = [''] - - v['staticlib_LINKFLAGS'] = [''] - -def gxx_modifier_darwin(conf): - if sys.platform != 'darwin': return - v = conf.env - v['shlib_CXXFLAGS'] = ['-fPIC'] - v['shlib_LINKFLAGS'] = ['-dynamiclib'] - v['shlib_PATTERN'] = 'lib%s.dylib' - - v['staticlib_LINKFLAGS'] = [''] - - v['SHLIB_MARKER'] = '' - v['STATICLIB_MARKER'] = '' - -def gxx_modifier_aix5(conf): - if sys.platform != 'aix5': return - v = conf.env - v['program_LINKFLAGS'] = ['-Wl,-brtl'] - - v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull'] - - v['SHLIB_MARKER'] = '' - -def gxx_modifier_debug(conf, kind='cpp'): - v = conf.env - # compiler debug levels - if conf.check_flags('-O2 -DNDEBUG', kind=kind): - v['CXXFLAGS_OPTIMIZED'] = ['-O2', '-DNDEBUG'] - v['CXXFLAGS_RELEASE'] = ['-O2', '-DNDEBUG'] - if conf.check_flags('-g -DDEBUG', kind=kind): - v['CXXFLAGS_DEBUG'] = ['-g', '-DDEBUG'] - v['LINKFLAGS_DEBUG'] = ['-g'] - if conf.check_flags('-g3 -O0 -DDEBUG', kind=kind): - v['CXXFLAGS_ULTRADEBUG'] = ['-g3', '-O0', '-DDEBUG'] - v['LINKFLAGS_ULTRADEBUG'] = ['-g'] - if conf.check_flags('-Wall', kind=kind): - for x in 'OPTIMIZED RELEASE DEBUG ULTRADEBUG'.split(): v.append_unique('CXXFLAGS_'+x, '-Wall') - try: - debug_level = Params.g_options.debug_level.upper() - except AttributeError: - debug_level = ccroot.DEBUG_LEVELS.CUSTOM - v.append_value('CXXFLAGS', v['CXXFLAGS_'+debug_level]) - v.append_value('LINKFLAGS', v['LINKFLAGS_'+debug_level]) - -detect = ''' -find_gxx -find_cpp 
-find_ar -gxx_common_flags -gxx_modifier_win32 -gxx_modifier_cygwin -gxx_modifier_darwin -gxx_modifier_aix5 -cxx_load_tools -cxx_check_features -gxx_modifier_debug -cxx_add_flags -''' - -def set_options(opt): - try: - opt.add_option('-d', '--debug-level', - action = 'store', - default = ccroot.DEBUG_LEVELS.RELEASE, - help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), - choices = ccroot.DEBUG_LEVELS.ALL, - dest = 'debug_level') - except optparse.OptionConflictError: - pass - -conftest(find_gxx) -conftest(gxx_common_flags) -conftest(gxx_modifier_win32) -conftest(gxx_modifier_cygwin) -conftest(gxx_modifier_darwin) -conftest(gxx_modifier_aix5) -conftest(gxx_modifier_debug) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/__init__.py a2jmidid-9/wafadmin/Tools/__init__.py --- a2jmidid-8~dfsg0/wafadmin/Tools/__init__.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/__init__.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/intltool.py a2jmidid-9/wafadmin/Tools/intltool.py --- a2jmidid-8~dfsg0/wafadmin/Tools/intltool.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/intltool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,124 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -"intltool support" - -import os, re -import TaskGen, Task, Params, Common, Scan, Utils, Runner -import cc -from Params import fatal, error - -# intltool -class intltool_in_taskgen(TaskGen.task_gen): - def __init__(self, *k): - TaskGen.task_gen.__init__(self, *k) - self.source = '' - self.inst_var = '' - self.inst_dir = '' - self.flags = '' - self.podir = 'po' - self.intlcache = '.intlcache' - self.m_tasks = [] - - def apply(self): - self.env = self.env.copy() - tree = Params.g_build - for i in self.to_list(self.source): - node = self.path.find_resource(i) - - podirnode = self.path.find_dir(self.podir) - - self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), self.podir, self.intlcache) - self.env['INTLPODIR'] = podirnode.srcpath(self.env) - self.env['INTLFLAGS'] = self.flags - - task = self.create_task('intltool', self.env) - task.set_inputs(node) - task.set_outputs(node.change_ext('')) - - task.install = {'var': self.inst_var, 'dir': self.inst_dir, 'chmod': 0644} - -class intltool_po_taskgen(TaskGen.task_gen): - def __init__(self, *k, **kw): - TaskGen.task_gen.__init__(self, *k) - self.chmod = 0644 - self.inst_var_default = 'LOCALEDIR' - self.appname = kw.get('appname', 'set_your_app_name') - self.podir = '' - self.m_tasks=[] - - def apply(self): - def install_translation(task): - out = task.m_outputs[0] - filename = out.m_name - (langname, ext) = os.path.splitext(filename) - inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + self.appname + '.mo' - Common.install_as(self.inst_var, inst_file, out.abspath(self.env), chmod=self.chmod) - - linguas = self.path.find_resource(os.path.join(self.podir, 'LINGUAS')) - if linguas: - # scan LINGUAS file for locales to process - file = open(linguas.abspath()) - langs = [] - for line in file.readlines(): - # ignore lines containing comments - if not line.startswith('#'): - langs += line.split() - file.close() - re_linguas = re.compile('[-a-zA-Z_@.]+') - for lang in langs: - # Make sure that we only process 
lines which contain locales - if re_linguas.match(lang): - node = self.path.find_resource(os.path.join(self.podir, re_linguas.match(lang).group() + '.po')) - task = self.create_task('po', self.env) - task.set_inputs(node) - task.set_outputs(node.change_ext('.mo')) - if Params.g_install: task.install = install_translation - else: - Params.pprint('RED', "Error no LINGUAS file found in po directory") - -Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', prio=10) -Task.simple_task_type('intltool', - '${INTLTOOL} ${INTLFLAGS} -q -u -c ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}', - color='BLUE', prio=200) - -def detect(conf): - - conf.check_tool('checks') - - pocom = conf.find_program('msgfmt') - #if not pocom: - # fatal('The program msgfmt (gettext) is mandatory!') - conf.env['POCOM'] = pocom - - intltool = conf.find_program('intltool-merge') - #if not intltool: - # fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!') - conf.env['INTLTOOL'] = intltool - - def getstr(varname): - return getattr(Params.g_options, varname, '') - - prefix = conf.env['PREFIX'] - datadir = getstr('datadir') - if not datadir: datadir = os.path.join(prefix,'share') - - conf.define('LOCALEDIR', os.path.join(datadir, 'locale')) - conf.define('DATADIR', datadir) - - #Define to 1 if you have the header file. - conf.check_header('locale.h', 'HAVE_LOCALE_H') - -def set_options(opt): - try: - opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') - except Exception: - pass - - opt.add_option('--datadir', type='string', default='', dest='datadir', help='read-only application data') - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/java.py a2jmidid-9/wafadmin/Tools/java.py --- a2jmidid-8~dfsg0/wafadmin/Tools/java.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/java.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,174 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2008 (ita) - -""" -Java support - -Javac is one of the few compilers that behaves very badly: -* it outputs files where it wants to (-d is only for the package root) -* it recompiles files silently behind your back -* it outputs an undefined amount of files (inner classes) - -Fortunately, the convention makes it possible to use th build dir without -too many problems for the moment - -Inner classes must be located and cleaned when a problem arise, -for the moment waf does not track the production of inner classes. - -Adding all the files to a task and executing it if any of the input files -change is only annoying for the compilation times -""" - -import os, re -import TaskGen, Task, Utils, Params - -class java_taskgen(TaskGen.task_gen): - s_default_ext = ['.java'] - def __init__(self, *k): - TaskGen.task_gen.__init__(self, *k) - - self.jarname = '' - self.jaropts = '' - self.classpath = '' - self.source_root = '.' - - # Jar manifest attributes - # TODO: Add manifest creation - self.jar_mf_attributes = {} - self.jar_mf_classpath = [] - - def apply(self): - nodes_lst = [] - - if not self.classpath: - if not self.env['CLASSPATH']: - self.env['CLASSPATH'] = '..' + os.pathsep + '.' 
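        # the default CLASSPATH points javac at the package root and its parent;
        # os.pathsep expands this to '..;.' on Windows and '..:.' elsewhere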
- else: - self.env['CLASSPATH'] = self.classpath - - find_resource_lst = self.path.find_resource_lst - - re_foo = re.compile(self.source) - - source_root_node = self.path.find_dir(self.source_root) - - src_nodes = [] - bld_nodes = [] - - prefix_path = source_root_node.abspath() - for (root, dirs, filenames) in os.walk(source_root_node.abspath()): - for x in filenames: - file = root + '/' + x - file = file.replace(prefix_path, '') - if file.startswith('/'): - file = file[1:] - - if re_foo.search(file) > -1: - node = source_root_node.find_resource(file) - src_nodes.append(node) - - node2 = node.change_ext(".class") - bld_nodes.append(node2) - - self.env['OUTDIR'] = source_root_node.abspath(self.env) - - tsk = self.create_task('javac', self.env) - tsk.set_inputs(src_nodes) - tsk.set_outputs(bld_nodes) - - if self.jarname: - tsk = self.create_task('jar_create', self.env) - tsk.set_inputs(bld_nodes) - tsk.set_outputs(self.path.find_build_lst(Utils.split_path(self.jarname))) - - if not self.env['JAROPTS']: - if self.jaropts: - self.env['JAROPTS'] = self.jaropts - else: - dirs = '/' - self.env['JAROPTS'] = '-C %s %s' % (self.env['OUTDIR'], dirs) - -Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${SRC}', color='BLUE', prio=10) -Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN', prio=50) - -def detect(conf): - # If JAVA_PATH is set, we prepend it to the path list - java_path = os.environ['PATH'].split(os.pathsep) - v = conf.env - - if os.environ.has_key('JAVA_HOME'): - java_path = [os.path.join(os.environ['JAVA_HOME'], 'bin')] + java_path - conf.env['JAVA_HOME'] = os.environ['JAVA_HOME'] - - conf.find_program('javac', var='JAVAC', path_list=java_path) - conf.find_program('java', var='JAVA', path_list=java_path) - conf.find_program('jar', var='JAR', path_list=java_path) - v['JAVA_EXT'] = ['.java'] - - if os.environ.has_key('CLASSPATH'): - v['CLASSPATH'] = os.environ['CLASSPATH'] - - if not v['JAR']: conf.fatal('jar is required for making java packages') - if not v['JAVAC']: conf.fatal('javac is required for compiling java classes') - v['JARCREATE'] = 'cf' # can use cvf - - conf.hook(check_java_class) - -def check_java_class(conf, classname, with_classpath=None): - """ - Check if specified java class is installed. 
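    The probe compiles the Test.java stub below, then runs
    'java -cp <classpath> Test <classname>' and reports the class as
    present only when that run writes nothing to stderr. A hypothetical
    usage sketch (the class and jar names here are illustrative only):

        conf.check_java_class('java.util.Vector')
        conf.check_java_class('org.example.Widget',
                              with_classpath='/opt/example/widget.jar')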
- """ - - class_check_source = """ -public class Test { - public static void main(String[] argv) { - Class lib; - if (argv.length < 1) { - System.err.println("Missing argument"); - System.exit(77); - } - try { - lib = Class.forName(argv[0]); - } catch (ClassNotFoundException e) { - System.err.println("ClassNotFoundException"); - System.exit(1); - } - lib = null; - System.exit(0); - } -} -""" - import shutil - - javatestdir = '.waf-javatest' - - classpath = javatestdir - if conf.env['CLASSPATH']: - classpath += os.pathsep + conf.env['CLASSPATH'] - if isinstance(with_classpath, str): - classpath += os.pathsep + with_classpath - - shutil.rmtree(javatestdir, True) - os.mkdir(javatestdir) - - java_file = open(os.path.join(javatestdir, 'Test.java'), 'w') - java_file.write(class_check_source) - java_file.close() - - # Compile the source - os.popen(conf.env['JAVAC'] + ' ' + os.path.join(javatestdir, 'Test.java')) - - (jstdin, jstdout, jstderr) = os.popen3(conf.env['JAVA'] + ' -cp ' + classpath + ' Test ' + classname) - - found = not bool(jstderr.read()) - conf.check_message('Java class %s' % classname, "", found) - - shutil.rmtree(javatestdir, True) - - return found - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/kde4.py a2jmidid-9/wafadmin/Tools/kde4.py --- a2jmidid-8~dfsg0/wafadmin/Tools/kde4.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/kde4.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,77 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -import os, sys, re, TaskGen, Task, Utils, Common - -class msgfmt_taskgen(TaskGen.task_gen): - def __init__(self, appname='set_your_app_name'): - TaskGen.task_gen.__init__(self) - self.langs = '' # for example "foo/fr foo/br" - self.chmod = 0644 - self.inst_var_default = 'KDE4_LOCALE_INSTALL_DIR' - self.appname = appname - - def apply(self): - - for lang in self.to_list(self.langs): - node = self.path.find_resource_lst(Utils.split_path(lang+'.po')) - task = self.create_task('msgfmt', self.env) - task.set_inputs(node) - task.set_outputs(node.change_ext('.mo')) - - if not Params.g_install: continue - langname = lang.split('/') - langname = langname[-1] - inst_dir = langname+os.sep+'LC_MESSAGES' - task.install = {'var':self.inst_var,'dir':inst_dir+'/','as':self.appname+'.mo','chmod':self.chmod} - -def detect(conf): - kdeconfig = conf.find_program('kde4-config') - if not kdeconfig: - conf.fatal('we need kde4-config') - prefix = os.popen('%s --prefix' % kdeconfig).read().strip() - file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix - try: os.stat(file) - except OSError: - file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix - try: os.stat(file) - except: conf.fatal('could not open %s' % file) - - try: - f = open(file, 'r') - txt = f.read() - f.close() - except (OSError, IOError): - conf.fatal('could not read %s' % file) - - txt = txt.replace('\\\n', '\n') - fu = re.compile('#(.*)\n') - txt = fu.sub('', txt) - - setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') - found = setregexp.findall(txt) - - for (_, key, val) in found: - #print key, val - conf.env[key] = val - - # well well, i could just write an interpreter for cmake files - conf.env['LIB_KDECORE']='kdecore' - conf.env['LIB_KDEUI'] ='kdeui' - conf.env['LIB_KIO'] ='kio' - conf.env['LIB_KHTML'] ='khtml' - conf.env['LIB_KPARTS'] ='kparts' - - conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR'] - conf.env['CPPPATH_KDECORE'] = 
conf.env['KDE4_INCLUDE_INSTALL_DIR'] - conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE") - - conf.env['MSGFMT'] = conf.find_program('msgfmt') - -Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', prio=10) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/libtool.py a2jmidid-9/wafadmin/Tools/libtool.py --- a2jmidid-8~dfsg0/wafadmin/Tools/libtool.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/libtool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,344 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Matthias Jahn, 2008, jahn matthias ath freenet punto de -# Thomas Nagy, 2008 (ita) - -import sys, re, os, optparse - -import TaskGen, Task, Params, Scan, Common, Utils, preproc -from Params import error, debug, fatal, warning -from TaskGen import taskgen, after, before, feature - -REVISION="0.1.3" - -""" -if you want to use the code here, you must use something like this: -obj = obj.create(...) -obj.features.append("libtool") -obj.vnum = "1.2.3" # optional, but versioned libraries are common -""" - -# fake libtool files -fakelibtool_vardeps = ['CXX', 'PREFIX'] -def fakelibtool_build(task): - # Writes a .la file, used by libtool - env = task.env() - dest = open(task.m_outputs[0].abspath(env), 'w') - sname = task.m_inputs[0].m_name - fu = dest.write - fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n") - if env['vnum']: - nums = env['vnum'].split('.') - libname = task.m_inputs[0].m_name - name3 = libname+'.'+env['vnum'] - name2 = libname+'.'+nums[0] - name1 = libname - fu("dlname='%s'\n" % name2) - strn = " ".join([name3, name2, name1]) - fu("library_names='%s'\n" % (strn) ) - else: - fu("dlname='%s'\n" % sname) - fu("library_names='%s %s %s'\n" % (sname, sname, sname) ) - fu("old_library=''\n") - vars = ' '.join(env['libtoolvars']+env['LINKFLAGS']) - fu("dependency_libs='%s'\n" % vars) - fu("current=0\n") - fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n") - fu("dlopen=''\ndlpreopen=''\n") - fu("libdir='%s/lib'\n" % env['PREFIX']) - dest.close() - return 0 - -def read_la_file(path): - sp = re.compile(r'^([^=]+)=\'(.*)\'$') - dc={} - file = open(path, "r") - for line in file.readlines(): - try: - #print sp.split(line.strip()) - _, left, right, _ = sp.split(line.strip()) - dc[left]=right - except ValueError: - pass - file.close() - return dc - -def apply_link_libtool(self): - if self.m_type != 'program': - linktask = self.link_task - latask = self.create_task('fakelibtool', self.env) - latask.set_inputs(linktask.m_outputs) - latask.set_outputs(linktask.m_outputs[0].change_ext('.la')) - self.m_latask = latask - - if not (Params.g_commands['install'] or Params.g_commands['uninstall']): return - self.install_results(dest_var, dest_subdir, self.m_latask) - -def apply_libtool(self): - self.env['vnum']=self.vnum - - paths=[] - libs=[] - libtool_files=[] - libtool_vars=[] - - for l in self.env['LINKFLAGS']: - if l[:2]=='-L': - paths.append(l[2:]) - elif l[:2]=='-l': - libs.append(l[2:]) - - for l in libs: - for p in paths: - dict = read_la_file(p+'/lib'+l+'.la') - linkflags2 = dict.get('dependency_libs', '') - for v in linkflags2.split(): - if v.endswith('.la'): - libtool_files.append(v) - libtool_vars.append(v) - continue - self.env.append_unique('LINKFLAGS', v) - break - - self.env['libtoolvars']=libtool_vars - - while libtool_files: - file = libtool_files.pop() - dict = read_la_file(file) - for v in dict['dependency_libs'].split(): - 
if v[-3:] == '.la': - libtool_files.append(v) - continue - self.env.append_unique('LINKFLAGS', v) - -Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', prio=200) - -class libtool_la_file: - def __init__ (self, la_filename): - self.__la_filename = la_filename - #remove path and .la suffix - self.linkname = str(os.path.split(la_filename)[-1])[:-3] - if self.linkname.startswith("lib"): - self.linkname = self.linkname[3:] - # The name that we can dlopen(3). - self.dlname = None - # Names of this library - self.library_names = None - # The name of the static archive. - self.old_library = None - # Libraries that this one depends upon. - self.dependency_libs = None - # Version information for libIlmImf. - self.current = None - self.age = None - self.revision = None - # Is this an already installed library? - self.installed = None - # Should we warn about portability when linking against -modules? - self.shouldnotlink = None - # Files to dlopen/dlpreopen - self.dlopen = None - self.dlpreopen = None - # Directory that this library needs to be installed in: - self.libdir = '/usr/lib' - if not self.__parse(): - raise "file %s not found!!" %(la_filename) - - def __parse(self): - "Retrieve the variables from a file" - if not os.path.isfile(self.__la_filename): return 0 - la_file=open(self.__la_filename, 'r') - for line in la_file: - ln = line.strip() - if not ln: continue - if ln[0]=='#': continue - (key, value) = str(ln).split('=', 1) - key = key.strip() - value = value.strip() - if value == "no": value = False - elif value == "yes": value = True - else: - try: value = int(value) - except ValueError: value = value.strip("'") - setattr(self, key, value) - la_file.close() - return 1 - - def get_libs(self): - """return linkflags for this lib""" - libs = [] - if self.dependency_libs: - libs = str(self.dependency_libs).strip().split() - if libs == None: - libs = [] - # add la lib and libdir - libs.insert(0, "-l%s" % self.linkname.strip()) - libs.insert(0, "-L%s" % self.libdir.strip()) - return libs - - def __str__(self): - return '''\ -dlname = "%(dlname)s" -library_names = "%(library_names)s" -old_library = "%(old_library)s" -dependency_libs = "%(dependency_libs)s" -version = %(current)s.%(age)s.%(revision)s -installed = "%(installed)s" -shouldnotlink = "%(shouldnotlink)s" -dlopen = "%(dlopen)s" -dlpreopen = "%(dlpreopen)s" -libdir = "%(libdir)s"''' % self.__dict__ - -class libtool_config: - def __init__ (self, la_filename): - self.__libtool_la_file = libtool_la_file(la_filename) - tmp = self.__libtool_la_file - self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)] - self.__sub_la_files = [] - self.__sub_la_files.append(la_filename) - self.__libs = None - - def __cmp__(self, other): - """make it compareable with X.Y.Z versions (Y and Z are optional)""" - if not other: - return 1 - othervers = [int(s) for s in str(other).split(".")] - selfvers = self.__version - - if selfvers > othervers: - return 1 - if selfvers < othervers: - return -1 - return 0 - - def __str__(self): - return "\n".join([ - str(self.__libtool_la_file), - ' '.join(self.__libtool_la_file.get_libs()), - '* New getlibs:', - ' '.join(self.get_libs()) - ]) - - def __get_la_libs(self, la_filename): - return libtool_la_file(la_filename).get_libs() - - def get_libs(self): - """return the complete uniqe linkflags that do not - contain .la files anymore""" - libs_list = list(self.__libtool_la_file.get_libs()) - libs_map = {} - while len(libs_list) > 0: - entry = libs_list.pop(0) - 
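        # each popped entry is either a plain linker flag, which is kept and
        # deduplicated through libs_map, or a path to a further .la archive,
        # whose dependency_libs are queued for the same flattening;
        # __sub_la_files prevents scanning the same archive twice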
if entry: - if str(entry).endswith(".la"): - ## prevents duplicate .la checks - if entry not in self.__sub_la_files: - self.__sub_la_files.append(entry) - libs_list.extend(self.__get_la_libs(entry)) - else: - libs_map[entry]=1 - self.__libs = libs_map.keys() - return self.__libs - - def get_libs_only_L(self): - if not self.__libs: self.get_libs() - libs = self.__libs - libs = filter(lambda s: str(s).startswith('-L'), libs) - return libs - - def get_libs_only_l(self): - if not self.__libs: self.get_libs() - libs = self.__libs - libs = filter(lambda s: str(s).startswith('-l'), libs) - return libs - - def get_libs_only_other(self): - if not self.__libs: self.get_libs() - libs = self.__libs - libs = filter(lambda s: not (str(s).startswith('-L') or str(s).startswith('-l')), libs) - return libs - -def useCmdLine(): - """parse cmdline args and control build""" - usage = '''Usage: %prog [options] PathToFile.la -example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la -nor: %prog --libs /usr/lib/libamarok.la''' - parser = optparse.OptionParser(usage) - a = parser.add_option - a("--version", dest = "versionNumber", - action = "store_true", default = False, - help = "output version of libtool-config" - ) - a("--debug", dest = "debug", - action = "store_true", default = False, - help = "enable debug" - ) - a("--libs", dest = "libs", - action = "store_true", default = False, - help = "output all linker flags" - ) - a("--libs-only-l", dest = "libs_only_l", - action = "store_true", default = False, - help = "output -l flags" - ) - a("--libs-only-L", dest = "libs_only_L", - action = "store_true", default = False, - help = "output -L flags" - ) - a("--libs-only-other", dest = "libs_only_other", - action = "store_true", default = False, - help = "output other libs (e.g. -pthread)" - ) - a("--atleast-version", dest = "atleast_version", - default=None, - help = "return 0 if the module is at least version ATLEAST_VERSION" - ) - a("--exact-version", dest = "exact_version", - default=None, - help = "return 0 if the module is exactly version EXACT_VERSION" - ) - a("--max-version", dest = "max_version", - default=None, - help = "return 0 if the module is at no newer than version MAX_VERSION" - ) - - (options, args) = parser.parse_args() - if len(args) != 1 and not options.versionNumber: - parser.error("incorrect number of arguments") - if options.versionNumber: - print "libtool-config version %s" % REVISION - return 0 - ltf = libtool_config(args[0]) - if options.debug: - print(ltf) - if options.atleast_version: - if ltf >= options.atleast_version: return 0 - sys.exit(1) - if options.exact_version: - if ltf == options.exact_version: return 0 - sys.exit(1) - if options.max_version: - if ltf <= options.max_version: return 0 - sys.exit(1) - - def p(x): - print " ".join(x) - if options.libs: p(ltf.get_libs()) - elif options.libs_only_l: p(ltf.get_libs_only_l()) - elif options.libs_only_L: p(ltf.get_libs_only_L()) - elif options.libs_only_other: p(ltf.get_libs_only_other()) - return 0 - -if __name__ == '__main__': - useCmdLine() - - -taskgen(apply_link_libtool) -feature("libtool")(apply_link_libtool) -after('apply_link')(apply_link_libtool) -taskgen(apply_libtool) -feature("libtool")(apply_libtool) -before('apply_core')(apply_libtool) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/lua.py a2jmidid-9/wafadmin/Tools/lua.py --- a2jmidid-8~dfsg0/wafadmin/Tools/lua.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/lua.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Sebastian Schlingmann, 2008 -# Thomas Nagy, 2008 (ita) - -import TaskGen - -TaskGen.declare_chain( - name = 'luac', - action = '${LUAC} -s -o ${TGT} ${SRC}', - ext_in = '.lua', - ext_out = '.luac', - reentrant = 0, - install = 'LUADIR', # env variable -) - -class lua_taskgen(TaskGen.task_gen): - def __init__(self): - TaskGen.task_gen.__init__(self) - self.chmod = 0755 - self.inst_var = '' - self.inst_dir = '' - -def detect(conf): - luac = conf.find_program('luac', var='LUAC') - if not luac: conf.fatal('cannot find the compiler "luac"') - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/misc.py a2jmidid-9/wafadmin/Tools/misc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/misc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/misc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,448 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -""" -Custom objects: - - execute a function everytime - - copy a file somewhere else -""" - -import shutil, re, os, types - -import TaskGen, Node, Params, Task, Common -import pproc as subprocess -from Params import fatal, debug - -def copy_func(tsk): - "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)" - env = tsk.env() - infile = tsk.m_inputs[0].abspath(env) - outfile = tsk.m_outputs[0].abspath(env) - try: - shutil.copy2(infile, outfile) - except OSError, IOError: - return 1 - else: - if tsk.chmod: os.chmod(outfile, tsk.chmod) - return 0 - -def action_process_file_func(tsk): - "Ask the function attached to the task to process it" - if not tsk.fun: fatal('task must have a function attached to it for copy_func to work!') - return tsk.fun(tsk) - -class cmd_taskgen(TaskGen.task_gen): - "This object will call a command everytime" - def __init__(self, type='none'): - TaskGen.task_gen.__init__(self) - self.m_type = type - self.prio = 1 - self.fun = None - self.inst_var = '' - self.inst_dir = '' - - def apply(self): - # create a task - if not self.fun: fatal('cmdobj needs a function!') - tsk = Task.TaskCmd(self.fun, self.env) - tsk.prio = self.prio - self.m_tasks.append(tsk) - tsk.install = {'var': self.inst_var, 'dir': self.inst_dir} - -class copy_taskgen(TaskGen.task_gen): - "By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)" - def __init__(self, type='none'): - TaskGen.task_gen.__init__(self) - - self.source = '' - self.target = '' - self.chmod = '' - self.fun = copy_func - - self.env = Params.g_build.env().copy() - - def apply(self): - - lst = self.to_list(self.source) - - for filename in lst: - node = self.path.find_resource(filename) - if not node: fatal('cannot find input file %s for processing' % filename) - - target = self.target - if not target or len(lst)>1: target = node.m_name - - # TODO the file path may be incorrect - newnode = self.path.find_or_declare(target) - - tsk = self.create_task('copy', self.env, 10) - tsk.set_inputs(node) - tsk.set_outputs(newnode) - tsk.m_env = self.env - tsk.fun = self.fun - tsk.chmod = self.chmod - - if not tsk.env(): - tsk.debug() - fatal('task witout an environment') - -def subst_func(tsk): - "Substitutes variables in a .in file" - - m4_re = re.compile('@(\w+)@', re.M) - - env = tsk.env() - infile = tsk.m_inputs[0].abspath(env) - outfile = tsk.m_outputs[0].abspath(env) - - file = open(infile, 'r') - code = file.read() - file.close() - - # replace all 
% by %% to prevent errors by % signs in the input file while string formatting - code = code.replace('%', '%%') - - s = m4_re.sub(r'%(\1)s', code) - - dict = tsk.dict - if not dict: - names = m4_re.findall(code) - for i in names: - if env[i] and type(env[i]) is types.ListType : - dict[i] = " ".join(env[i]) - else: dict[i] = env[i] - - file = open(outfile, 'w') - file.write(s % dict) - file.close() - - return 0 - -class subst_taskgen(TaskGen.task_gen): - def __init__(self, type='none'): - TaskGen.task_gen.__init__(self) - self.fun = subst_func - self.dict = {} - self.prio = 8 - - self.inst_var = '' - self.inst_dir = '' - - def apply(self): - - lst = self.to_list(self.source) - - for filename in lst: - node = self.path.find_resource(filename) - if not node: fatal('cannot find input file %s for processing' % filename) - - newnode = node.change_ext('') - - if self.dict and not self.env['DICT_HASH']: - self.env = self.env.copy() - self.env['DICT_HASH'] = hash(str(self.dict)) # <- pretty sure it wont work (ita) - - tsk = self.create_task('copy', self.env, self.prio) - tsk.set_inputs(node) - tsk.set_outputs(newnode) - tsk.m_env = self.env - tsk.fun = self.fun - tsk.dict = self.dict - tsk.dep_vars = ['DICT_HASH'] - tsk.install = {'var': self.inst_var, 'dir': self.inst_dir} - - if not tsk.env(): - tsk.debug() - fatal('task without an environment') - - - -#################### -## command-output #### -#################### - -class CmdArg(object): - """Represents a command-output argument that is based on input or output files or directories""" - pass - -class CmdFileArg(CmdArg): - def __init__(self, file_name, template=None): - CmdArg.__init__(self) - self.file_name = file_name - if template is None: - self.template = '%s' - else: - self.template = template - self.node = None - -class CmdInputFileArg(CmdFileArg): - def find_node(self, base_path): - assert isinstance(base_path, Node.Node) - self.node = base_path.find_resource(self.file_name) - if self.node is None: - Params.fatal("Input file %s not found in " % (self.file_name, base_path)) - - def get_path(self, env, absolute): - if absolute: - return self.template % self.node.abspath(env) - else: - return self.template % self.node.srcpath(env) - -class CmdOutputFileArg(CmdFileArg): - def find_node(self, base_path): - assert isinstance(base_path, Node.Node) - self.node = base_path.find_or_declare(self.file_name) - if self.node is None: - Params.fatal("Output file %s not found in " % (self.file_name, base_path)) - def get_path(self, env, absolute): - if absolute: - return self.template % self.node.abspath(env) - else: - return self.template % self.node.bldpath(env) - -class CmdDirArg(CmdArg): - def __init__(self, dir_name): - CmdArg.__init__(self) - self.dir_name = dir_name - self.node = None - def find_node(self, base_path): - assert isinstance(base_path, Node.Node) - self.node = base_path.find_dir(self.dir_name) - if self.node is None: - Params.fatal("Directory %s not found in " % (self.dir_name, base_path)) - -class CmdInputDirArg(CmdDirArg): - def get_path(self, dummy_env, dummy_absolute): - return self.node.abspath() - -class CmdOutputDirArg(CmdFileArg): - def get_path(self, env, dummy_absolute): - return self.node.abspath(env) - - -class command_output(Task.Task): - m_color = "BLUE" - def __init__(self, env, priority, command, command_node, command_args, stdin, stdout, cwd, os_env): - Task.Task.__init__(self, 'command-output', env, prio=priority, normal=1) - assert isinstance(command, (str, Node.Node)) - self.command = command - self.command_args 
= command_args - self.stdin = stdin - self.stdout = stdout - self.cwd = cwd - self.os_env = os_env - - if command_node is not None: self.dep_nodes = [command_node] - self.dep_vars = [] # additional environment variables to look - - def run(self): - task = self - assert len(task.m_inputs) > 0 - - def input_path(node, template): - if task.cwd is None: - return template % node.bldpath(task.env()) - else: - return template % node.abspath() - def output_path(node, template): - fun = node.abspath - if task.cwd is None: fun = node.bldpath - return template % fun(task.env()) - - if isinstance(task.command, Node.Node): - argv = [input_path(task.command, '%s')] - else: - argv = [task.command] - - for arg in task.command_args: - if isinstance(arg, str): - argv.append(arg) - else: - assert isinstance(arg, CmdArg) - argv.append(arg.get_path(task.env(), (task.cwd is not None))) - - if task.stdin: - stdin = file(input_path(task.stdin, '%s')) - else: - stdin = None - - if task.stdout: - stdout = file(output_path(task.stdout, '%s'), "w") - else: - stdout = None - - if task.cwd is None: - cwd = ('None (actually %r)' % os.getcwd()) - else: - cwd = repr(task.cwd) - Params.debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" % - (cwd, stdin, stdout, argv)) - - if task.os_env is None: - os_env = os.environ - else: - os_env = task.os_env - command = subprocess.Popen(argv, stdin=stdin, stdout=stdout, cwd=task.cwd, env=os_env) - return command.wait() - -class cmd_output_taskgen(TaskGen.task_gen): - - def __init__(self, *k): - TaskGen.task_gen.__init__(self, *k) - - self.stdin = None - self.stdout = None - - # the command to execute - self.command = None - - # whether it is an external command; otherwise it is assumed - # to be an executable binary or script that lives in the - # source or build tree. - self.command_is_external = False - - # extra parameters (argv) to pass to the command (excluding - # the command itself) - self.argv = [] - - # task priority - self.prio = 100 - - # dependencies to other objects -> this is probably not what you want (ita) - # values must be 'task_gen' instances (not names!) - self.dependencies = [] - - # dependencies on env variable contents - self.dep_vars = [] - - # input files that are implicit, i.e. they are not - # stdin, nor are they mentioned explicitly in argv - self.hidden_inputs = [] - - # output files that are implicit, i.e. they are not - # stdout, nor are they mentioned explicitly in argv - self.hidden_outputs = [] - - # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here) - self.cwd = None - - # OS environment variables to pass to the subprocess - # if None, use the default environment variables unchanged - self.os_env = None - - - def apply(self): - if self.command is None: - Params.fatal("command-output missing command") - if self.command_is_external: - cmd = self.command - cmd_node = None - else: - cmd_node = self.path.find_resource(self.command) - assert cmd_node is not None, ('''Could not find command '%s' in source tree. 
-Hint: if this is an external command, -use command_is_external=True''') % (self.command,) - cmd = cmd_node - - if self.cwd is None: - cwd = None - else: - assert isinstance(cwd, CmdDirArg) - self.cwd.find_node(self.path) - - args = [] - inputs = [] - outputs = [] - - for arg in self.argv: - if isinstance(arg, CmdArg): - arg.find_node(self.path) - if isinstance(arg, CmdInputFileArg): - inputs.append(arg.node) - if isinstance(arg, CmdOutputFileArg): - outputs.append(arg.node) - - if self.stdout is None: - stdout = None - else: - assert isinstance(self.stdout, basestring) - stdout = self.path.find_or_declare(self.stdout) - if stdout is None: - Params.fatal("File %s not found" % (self.stdout,)) - outputs.append(stdout) - - if self.stdin is None: - stdin = None - else: - assert isinstance(self.stdin, basestring) - stdin = self.path.find_resource(self.stdin) - if stdin is None: - Params.fatal("File %s not found" % (self.stdin,)) - inputs.append(stdin) - - for hidden_input in self.to_list(self.hidden_inputs): - node = self.path.find_resource(hidden_input) - if node is None: - Params.fatal("File %s not found in dir %s" % (hidden_input, self.path)) - inputs.append(node) - - for hidden_output in self.to_list(self.hidden_outputs): - node = self.path.find_or_declare(hidden_output) - if node is None: - Params.fatal("File %s not found in dir %s" % (hidden_output, self.path)) - outputs.append(node) - - if not inputs: - Params.fatal("command-output objects must have at least one input file") - if not outputs: - Params.fatal("command-output objects must have at least one output file") - - task = command_output(self.env, self.prio, - cmd, cmd_node, self.argv, - stdin, stdout, cwd, self.os_env) - self.m_tasks.append(task) - - task.set_inputs(inputs) - task.set_outputs(outputs) - task.dep_vars = self.to_list(self.dep_vars) - - - for dep in self.dependencies: - assert dep is not self - if not dep.m_posted: - dep.post() - for dep_task in dep.m_tasks: - task.set_run_after(dep_task) - - def input_file(self, file_name, template='%s'): - """Returns an object to be used as argv element that instructs - the task to use a file from the input vector at the given - position as argv element.""" - return CmdInputFileArg(file_name, template) - - def output_file(self, file_name, template='%s'): - """Returns an object to be used as argv element that instructs - the task to use a file from the output vector at the given - position as argv element.""" - return CmdOutputFileArg(file_name, template) - - def input_dir(self, dir_name): - """Returns an object to be used as argv element that instructs - the task to use a directory path from the input vector at the given - position as argv element.""" - return CmdInputDirArg(dir_name) - - def output_dir(self, dir_name): - """Returns an object to be used as argv element that instructs - the task to use a directory path from the output vector at the given - position as argv element.""" - return CmdOutputDirArg(dir_name) - -Task.task_type_from_func('copy', vars=[], func=action_process_file_func) -TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/msvc.py a2jmidid-9/wafadmin/Tools/msvc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/msvc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/msvc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,433 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2006 (dv) -# Tamas Pal, 2007 (folti) -# Visual C support - beta, needs more testing - -import os, sys, re, string, optparse -import Utils, Params, TaskGen, Runner, Configure, Task -from Params import debug, error, fatal, warning -from Utils import quote_whitespace -from TaskGen import taskgen, after, before, feature - -import ccroot -from libtool import read_la_file -from os.path import exists - -def msvc_linker(task): - """Special linker for MSVC with support for embedding manifests into DLL's - and executables compiled by Visual Studio 2005 or probably later. Without - the manifest file, the binaries are unusable. - See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx - Problems with this tool: it is always called whether MSVC creates manifests or not.""" - e = task.env() - linker = e['LINK'] - srcf = e['LINK_SRC_F'] - trgtf = e['LINK_TGT_F'] - linkflags = e.get_flat('LINKFLAGS') - libdirs = e.get_flat('_LIBDIRFLAGS') - libs = e.get_flat('_LIBFLAGS') - - subsystem='' - if task.m_subsystem: - subsystem='/subsystem:%s' % task.m_subsystem - outfile=task.m_outputs[0].bldpath(e) - manifest=outfile+'.manifest' - # pdb file containing the debug symbols (if compiled with /Zi or /ZI and linked with /debug - pdbnode=task.m_outputs[0].change_ext('.pdb') - pdbfile=pdbnode.bldpath(e) - - objs=" ".join(['"%s"' % a.abspath(e) for a in task.m_inputs]) - - cmd="%s %s %s%s %s%s %s %s %s" % (linker,subsystem,srcf,objs,trgtf,outfile, linkflags, libdirs,libs) - ret=Runner.exec_command(cmd) - if ret: return ret - - # check for the pdb file. if exists, add to the list of outputs - if os.path.exists(pdbfile): - task.m_outputs.append(pdbnode) - - if os.path.exists(manifest): - debug('manifesttool', 'msvc') - mtool = e['MT'] - if not mtool: - return 0 - mode='' - # embedding mode. Different for EXE's and DLL's. - # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx - if task.m_type == 'program': - mode='1' - elif task.m_type == 'shlib': - mode='2' - - debug('embedding manifest','msvcobj') - flags = e['MTFLAGS'] - if flags: - flags=string.join(flags,' ') - else: - flags='' - - cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags, - manifest, outfile, mode) - ret=Runner.exec_command(cmd) - return ret - -# importlibs provided by MSVC/Platform SDK. Do NOT search them.... 
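As an aside, a minimal standalone sketch of the manifest-embedding step that
msvc_linker performs above (the helper and the 'mt.exe' spelling are
illustrative assumptions, not part of this module; mode 1 targets
executables and mode 2 DLLs, per the MSDN page cited in msvc_linker):

    import subprocess

    def embed_manifest(outfile, is_dll):
        # embed "<outfile>.manifest" into the freshly linked binary
        mode = '2' if is_dll else '1'  # 1 = EXE, 2 = DLL
        argv = ['mt.exe', '/NOLOGO',
                '-manifest', outfile + '.manifest',
                '-outputresource:%s;#%s' % (outfile, mode)]
        return subprocess.call(argv)  # 0 on success, as with Runner.exec_command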
-g_msvc_systemlibs = """ -aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet -cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs -credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d -ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp -faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid -gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop -kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi -mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree -msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm -netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp -odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 -osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu -ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm -rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 -shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 -traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg -version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm -wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp -""".split() - - -def find_lt_names_msvc(self, libname, is_static=False): - """ - Win32/MSVC specific code to glean out information from libtool la files. - this function is not attached to the task_gen class - """ - lt_names=[ - 'lib%s.la' % libname, - '%s.la' % libname, - ] - - for path in self.libpaths: - for la in lt_names: - laf=os.path.join(path,la) - dll=None - if exists(laf): - ltdict=read_la_file(laf) - lt_libdir=None - if ltdict.has_key('libdir') and ltdict['libdir'] != '': - lt_libdir=ltdict['libdir'] - if not is_static and ltdict.has_key('library_names') and ltdict['library_names'] != '': - dllnames=ltdict['library_names'].split() - dll=dllnames[0].lower() - dll=re.sub('\.dll$', '', dll) - return (lt_libdir, dll, False) - elif ltdict.has_key('old_library') and ltdict['old_library'] != '': - olib=ltdict['old_library'] - if exists(os.path.join(path,olib)): - return (path, olib, True) - elif lt_libdir != '' and exists(os.path.join(lt_libdir,olib)): - return (lt_libdir, olib, True) - else: - return (None, olib, True) - else: - fatal('invalid libtool object file: %s' % laf) - return (None, None, None) - -def libname_msvc(self, libname, is_static=False): - lib=libname.lower() - lib=re.sub('\.lib$','',lib) - - if lib in g_msvc_systemlibs: - return lib+'.lib' - - lib=re.sub('^lib','',lib) - - if lib == 'm': - return None - - (lt_path, lt_libname, lt_static) = find_lt_names_msvc(self, lib, is_static) - - if lt_path != None and lt_libname != None: - if lt_static == True: - # file existance check has been made by find_lt_names - return os.path.join(lt_path,lt_libname) - - if lt_path != None: - _libpaths=[lt_path] + self.libpaths - else: - _libpaths=self.libpaths - - static_libs=[ - '%ss.lib' % lib, - 'lib%ss.lib' % lib, - '%s.lib' %lib, - 'lib%s.lib' % lib, - ] - - dynamic_libs=[ - 'lib%s.dll.lib' % lib, - 'lib%s.dll.a' % lib, - '%s.dll.lib' % lib, - '%s.dll.a' % lib, - 'lib%s_d.lib' % lib, - '%s_d.lib' % lib, - '%s.lib' %lib, - ] - - libnames=static_libs - if not is_static: - 
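        # when a dynamic link is allowed, try the import-library spellings first
        # and fall back to the static names; the loop below returns the first
        # candidate that exists along the accumulated -L search paths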
libnames=dynamic_libs + static_libs - - for path in _libpaths: - for libn in libnames: - if os.path.exists(os.path.join(path,libn)): - debug('lib found: %s' % os.path.join(path,libn), 'msvc') - return libn - - return None - -def apply_msvc_obj_vars(self): - debug('apply_msvc_obj_vars called for msvcobj', 'msvc') - env = self.env - app = env.append_unique - - cpppath_st = env['CPPPATH_ST'] - lib_st = env['LIB_ST'] - staticlib_st = env['STATICLIB_ST'] - libpath_st = env['LIBPATH_ST'] - staticlibpath_st = env['STATICLIBPATH_ST'] - - self.addflags('CPPFLAGS', self.cppflags) - - for i in env['RPATH']: app('LINKFLAGS', i) - for i in env['LIBPATH']: - app('LINKFLAGS', libpath_st % i) - if not self.libpaths.count(i): - self.libpaths.append(i) - for i in env['LIBPATH']: - app('LINKFLAGS', staticlibpath_st % i) - if not self.libpaths.count(i): - self.libpaths.append(i) - - # i doubt that anyone will make a fully static binary anyway - if not env['FULLSTATIC']: - if env['STATICLIB'] or env['LIB']: - app('LINKFLAGS', env['SHLIB_MARKER']) - - if env['STATICLIB']: - app('LINKFLAGS', env['STATICLIB_MARKER']) - for i in env['STATICLIB']: - debug('libname: %s' % i,'msvc') - libname = libname_msvc(self, i, True) - debug('libnamefixed: %s' % libname,'msvc') - if libname != None: - app('LINKFLAGS', libname) - - if self.env['LIB']: - for i in env['LIB']: - debug('libname: %s' % i,'msvc') - libname = libname_msvc(self, i) - debug('libnamefixed: %s' % libname,'msvc') - if libname != None: - app('LINKFLAGS', libname) - -def apply_link_msvc(self): - # if we are only building .o files, tell which ones we built - # FIXME remove the "type" thing - # FIXME simplify this piece of code (about the same is in ccroot.py) - if self.m_type == 'objects': - self.out_nodes = [] - app = self.out_nodes.append - for t in self.compiled_tasks: app(t.m_outputs[0]) - return - - # use a custom linker is specified (self.link) - link = getattr(self, 'link', None) - if not link: - if self.m_type == 'staticlib': link = 'msvc_ar_link_static' - elif 'cxx' in self.features: link = 'msvc_cxx_link' - else: link = 'msvc_cc_link' - linktask = self.create_task(link, self.env) - - outputs = [t.m_outputs[0] for t in self.compiled_tasks] - linktask.set_inputs(outputs) - linktask.set_outputs(self.path.find_build(get_target_name(self))) - - link_task.m_type = self.m_type - link_task.m_subsystem = getattr(self, 'subsystem', '') - self.link_task = linktask - -def init_msvc(self): - "all methods (msvc and non-msvc) are to be executed, but we remove the ones we do not want" - if self.env['MSVC']: - self.meths.remove('apply_link') - else: - for x in ['apply_link_msvc', 'apply_msvc_obj_vars']: - self.meths.remove(x) - self.libpaths = getattr(self, 'libpaths', '') - -static_link_str = '${STLIBLINK} ${LINK_SRC_F}${SRC} ${LINK_TGT_F}${TGT}' -Task.simple_task_type('msvc_ar_link_static', static_link_str, color='YELLOW', prio=101) -Task.task_type_from_func('msvc_cc_link', vars=['LINK', 'LINK_SRC_F', 'LINK_TGT_F', 'LINKFLAGS', '_LIBDIRFLAGS', '_LIBFLAGS', 'MT', 'MTFLAGS'] , color='YELLOW', func=msvc_linker, prio=101) -Task.task_type_from_func('msvc_cxx_link', vars=['LINK', 'LINK_SRC_F', 'LINK_TGT_F', 'LINKFLAGS', '_LIBDIRFLAGS', '_LIBFLAGS', 'MT', 'MTFLAGS'] , color='YELLOW', func=msvc_linker, prio=101) - -rc_str='${RC} ${RCFLAGS} /fo ${TGT} ${SRC}' -Task.simple_task_type('rc', rc_str, color='GREEN', prio=50) - -import winres - -def detect(conf): - # due to path format limitations, limit operation only to native Win32. Yeah it sucks. 
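    # (everything below locates CL/LINK/LIB/MT and builds flags such as /Fo
    # and /LIBPATH: with Windows-style paths, which cygwin Python would not
    # preserve; hence the hard check)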
- if sys.platform != 'win32': - conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet') - - comp = conf.find_program('CL', var='CXX') - if not comp: conf.fatal('CL was not found (compiler)') - - link = conf.find_program('LINK') - if not link: conf.fatal('LINK was not found (linker)') - - stliblink = conf.find_program('LIB') - if not stliblink: return - - manifesttool = conf.find_program('MT') - - v = conf.env - - # c/c++ compiler - check for whitespace, and if so, add quotes - v['CC'] = quote_whitespace(comp) - v['CXX'] = v['CC'] - v['MSVC'] = 1 # this is deprecated. use CXX_NAME/CC_NAME instead - v['CXX_NAME'] = 'msvc' - v['CC_NAME'] = 'msvc' - - v['CPPFLAGS'] = ['/W3', '/nologo', '/EHsc', '/errorReport:prompt'] - v['CCDEFINES'] = ['WIN32'] # command-line defines - v['CXXDEFINES'] = ['WIN32'] # command-line defines - - v['_CCINCFLAGS'] = [] - v['_CCDEFFLAGS'] = [] - v['_CXXINCFLAGS'] = [] - v['_CXXDEFFLAGS'] = [] - - v['CC_SRC_F'] = '' - v['CC_TGT_F'] = '/c /Fo' - v['CXX_SRC_F'] = '' - v['CXX_TGT_F'] = '/c /Fo' - - v['CPPPATH_ST'] = '/I%s' # template for adding include paths - - # Subsystem specific flags - v['CPPFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE'] - v['CPPFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE'] - v['CPPFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX'] - v['CPPFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS'] - v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE'] - - # CRT specific flags - v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT'] - v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD'] - v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] - v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] - - v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd'] - v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd'] - v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] - v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] - - # compiler debug levels - v['CCFLAGS'] = ['/TC'] - v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG'] - v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG'] - v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI'] - v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI'] - - v['CXXFLAGS'] = ['/TP'] - v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG'] - v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG'] - v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI'] - v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI'] - - # linker - v['STLIBLINK'] = '\"%s\"' % stliblink - v['LINK'] = '\"%s\"' % link - v['LIB'] = [] - - v['LINK_TGT_F'] = '/OUT:' - v['LINK_SRC_F'] = ' ' - - v['LIB_ST'] = '%s.lib' # template for adding libs - v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths - v['STATICLIB_ST'] = '%s.lib' - v['STATICLIBPATH_ST'] = '/LIBPATH:%s' - v['CCDEFINES_ST'] = '/D%s' - v['CXXDEFINES_ST'] = '/D%s' - v['_LIBDIRFLAGS'] = '' - v['_LIBFLAGS'] = '' - - v['SHLIB_MARKER'] = '' - v['STATICLIB_MARKER'] = '' - - conf.check_tool('winres') - - if not conf.env['WINRC']: - warning('Resource compiler not found. Compiling resource file is disabled','msvc') - - # manifest tool. Not required for VS 2003 and below. 
Must have for VS 2005 and later - if manifesttool: - v['MT'] = quote_whitespace (manifesttool) - v['MTFLAGS']=['/NOLOGO'] - - v['LINKFLAGS'] = ['/NOLOGO', '/MACHINE:X86', '/ERRORREPORT:PROMPT'] - - try: - debug_level = Params.g_options.debug_level.upper() - except AttributeError: - debug_level = ccroot.DEBUG_LEVELS.CUSTOM - v['CCFLAGS'] += v['CCFLAGS_'+debug_level] - v['CXXFLAGS'] += v['CXXFLAGS_'+debug_level] - v['LINKFLAGS'] += v['LINKFLAGS_'+debug_level] - - conf.add_os_flags('CFLAGS', 'CCFLAGS') - conf.add_os_flags('CPPFLAGS') - conf.add_os_flags('CXXFLAGS') - conf.add_os_flags('LINKFLAGS') - - # shared library - v['shlib_CCFLAGS'] = [''] - v['shlib_CXXFLAGS'] = [''] - v['shlib_LINKFLAGS']= ['/DLL'] - v['shlib_PATTERN'] = '%s.dll' - - # static library - v['staticlib_LINKFLAGS'] = [''] - v['staticlib_PATTERN'] = '%s.lib' - - # program - v['program_PATTERN'] = '%s.exe' - -def set_options(opt): - try: - opt.add_option('-d', '--debug-level', - action = 'store', - default = ccroot.DEBUG_LEVELS.DEBUG, - help = "Specify the debug level, does nothing if CFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), - choices = ccroot.DEBUG_LEVELS.ALL, - dest = 'debug_level') - except optparse.OptionConflictError: - pass # maybe already defined by another C-compiler - - -taskgen(apply_msvc_obj_vars) -feature('cc', 'cxx')(apply_msvc_obj_vars) -after('apply_obj_vars_cc')(apply_msvc_obj_vars) -after('apply_obj_vars_cxx')(apply_msvc_obj_vars) -taskgen(apply_link_msvc) -feature('cc', 'cxx')(apply_link_msvc) -after('apply_core')(apply_link_msvc) -before('apply_obj_vars_cc')(apply_link_msvc) -before('apply_obj_vars_cxx')(apply_link_msvc) -taskgen(init_msvc) -feature('cc', 'cxx')(init_msvc) -before('apply_core')(init_msvc) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/nasm.py a2jmidid-9/wafadmin/Tools/nasm.py --- a2jmidid-8~dfsg0/wafadmin/Tools/nasm.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/nasm.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2008 - -""" -Nasm processing -""" - -import os -import TaskGen, Task -from TaskGen import taskgen, before, extension - -nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}' - -EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP'] - -def apply_nasm_vars(self): - - # flags - if hasattr(self, 'nasm_flags'): - for flag in self.to_list(self.nasm_flags): - self.env.append_value('NASM_FLAGS', flag) - - # includes - well, if we suppose it works with c processing - if hasattr(self, 'includes'): - for inc in self.to_list(self.includes): - self.env.append_value('NASM_INCLUDES', '-I %s' % inc.srcpath(self.env)) - -def nasm_file(self, node): - o_node = node.change_ext('.o') - - task = self.create_task('nasm') - task.set_inputs(node) - task.set_outputs(o_node) - - self.compiled_tasks.append(task) - - self.meths.add('apply_nasm_vars') - -# create our action here -Task.simple_task_type('nasm', nasm_str, color='BLUE', prio=40) - -def detect(conf): - nasm = conf.find_program('nasm', var='NASM') - if not nasm: conf.fatal("could not find nasm, install it or set PATH env var.") - - -taskgen(apply_nasm_vars) -before('apply_link')(apply_nasm_vars) -extension(EXT_NASM)(nasm_file) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/ocaml.py a2jmidid-9/wafadmin/Tools/ocaml.py --- a2jmidid-8~dfsg0/wafadmin/Tools/ocaml.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/ocaml.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,397 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -"ocaml support" - -import os, re -import Params, TaskGen, Scan, Utils, Task -from Params import error, fatal -from TaskGen import taskgen, feature, before, after, extension - -EXT_MLL = ['.mll'] -EXT_MLY = ['.mly'] -EXT_MLI = ['.mli'] -EXT_MLC = ['.c'] -EXT_ML = ['.ml'] - -open_re = re.compile('open ([a-zA-Z]+);;', re.M) - -def filter_comments(filename): - f = open(filename, 'r') - txt = f.read() - f.close() - buf = [] - - i = 0 - max = len(txt) - while i < max: - c = txt[i] - # skip a string - if c == '"': - i += 1 - c = '' - while i < max: - p = c - c = txt[i] - i += 1 - if i == max: return buf - if c == '"': - cnt = 0 - while i < cnt and i < max: - #print "cntcnt = ", str(cnt), self.txt[self.i-2-cnt] - if txt[i-2-cnt] == '\\': cnt+=1 - else: break - #print "cnt is ", str(cnt) - if (cnt%2)==0: break - # skip a char - unfortunately caml is a bit special t' - elif c == "'": - i += 1 - if i == max: return buf - c = txt[i] - if c == '\\': - i += 1 - if i == max: return buf - c = txt[i] - if c == 'x': - i += 2 # skip two chars - i += 1 - if i == max: return buf - c = txt[i] - #if c != '\'': print "uh-oh, invalid character" - - # skip a comment - elif c == '(': - if i == max: break - c = txt[i+1] - # eat (* *) comments - if c == '*': - i += 1 - nesting = 1 - prev = 0 - while i < max: - c = txt[i] - if c == '*': - prev = 1 - elif c == ')' and prev: - if prev: - nesting -= 1 - if nesting == 0: break - elif c == '(': - prev = 0 - if i == max: return buf - i += 1 - c = txt[i] - if c == '*': nesting += 1 - else: - prev = 0 - i += 1 - # a valid char, add it to the buffer - else: - buf.append(c) - i += 1 - return buf - -def new_may_start(self): - if not getattr(self, 'order', ''): - - # now reorder the m_inputs given the task dependencies - if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks - else: alltasks = self.obj.native_tasks - - # this part is difficult, we do not have a total order 
on the tasks - # if the dependencies are wrong, this may not stop - seen = [] - pendant = []+alltasks - while pendant: - task = pendant.pop(0) - if task in seen: continue - for x in task.get_run_after(): - if not x in seen: - pendant.append(task) - break - else: - seen.append(task) - self.m_inputs = [x.m_outputs[0] for x in seen] - self.order=1 - return Task.Task.may_start(self) - -class ocaml_scanner(Scan.scanner): - def __init__(self): - Scan.scanner.__init__(self) - def may_start(self, task): - if getattr(task, 'flag_deps', ''): return 1 - - # the evil part is that we can only compute the dependencies after the - # source files can be read (this means actually producing the source files) - if getattr(task, 'bytecode', ''): alltasks = task.obj.bytecode_tasks - else: alltasks = task.obj.native_tasks - - task.signature() # ensure that files are scanned - unfortunately - tree = Params.g_build - env = task.env() - for node in task.m_inputs: - lst = tree.node_deps[node.variant(env)][node.id] - for depnode in lst: - for t in alltasks: - if t == task: continue - if depnode in t.m_inputs: - task.set_run_after(t) - task.obj.flag_deps = 'ok' - - # TODO necessary to get the signature right - for now - delattr(task, 'sign_all') - task.signature() - - return 1 - - def scan(self, task, node): - #print "scan is called" - code = "".join(filter_comments(node.abspath(task.env()))) - - global open_re - names=[] - import_iterator = open_re.finditer(code) - if import_iterator: - for import_match in import_iterator: - names.append(import_match.group(1)) - found_lst = [] - raw_lst = [] - for name in names: - nd = None - for x in task.incpaths: - nd = x.find_resource(name.lower()+'.ml') - if nd: - found_lst.append(nd) - break - else: - raw_lst.append(name) - - return (found_lst, raw_lst) - -g_caml_scanner = ocaml_scanner() - -def get_target_name(self, bytecode): - if bytecode: - if self.islibrary: - return self.target+'.cma' - else: - return self.target+'.run' - else: - if self.m_type == 'c_object': return self.target+'.o' - - if self.islibrary: - return self.target+'.cmxa' - else: - return self.target - -native_lst=['native', 'all', 'c_object'] -bytecode_lst=['bytecode', 'all'] -class ocaml_taskgen(TaskGen.task_gen): - s_default_ext = ['.mli', '.mll', '.mly', '.ml'] - def __init__(self, *k, **kw): - TaskGen.task_gen.__init__(self) - - self.m_type = kw.get('type', 'native') - self.m_source = '' - self.m_target = '' - self.islibrary = kw.get('library', 0) - self._incpaths_lst = [] - self._bld_incpaths_lst = [] - self._mlltasks = [] - self._mlytasks = [] - - self.mlitasks = [] - self.native_tasks = [] - self.bytecode_tasks = [] - self.linktasks = [] - - self.bytecode_env = None - self.native_env = None - - - self.compiled_tasks = [] - self.includes = '' - self.uselib = '' - - self.out_nodes = [] - - self.are_deps_set = 0 - - if not self.env: self.env = Params.g_build.env() - - if not self.m_type in ['bytecode','native','all','c_object']: - print 'type for camlobj is undefined '+self.m_type - self.m_type='all' - - if self.m_type in native_lst: - self.native_env = self.env.copy() - self.native_env['OCAMLCOMP'] = self.native_env['OCAMLOPT'] - self.native_env['OCALINK'] = self.native_env['OCAMLOPT'] - if self.m_type in bytecode_lst: - self.bytecode_env = self.env.copy() - self.bytecode_env['OCAMLCOMP'] = self.bytecode_env['OCAMLC'] - self.bytecode_env['OCALINK'] = self.bytecode_env['OCAMLC'] - - if self.islibrary: - self.bytecode_env['OCALINKFLAGS'] = '-a' - self.native_env['OCALINKFLAGS'] = '-a' - - if self.m_type 
== 'c_object': - self.native_env['OCALINK'] = self.native_env['OCALINK']+' -output-obj' - - self.features.append('ocaml') - -TaskGen.add_feature('ocaml', ['apply_core']) - -def apply_incpaths_ml(self): - inc_lst = self.includes.split() - lst = self._incpaths_lst - tree = Params.g_build - for dir in inc_lst: - node = self.path.find_dir(dir) - if not node: - error("node not found: " + str(dir)) - continue - Params.g_build.rescan(node) - if not node in lst: lst.append(node) - self._bld_incpaths_lst.append(node) - # now the nodes are added to self._incpaths_lst - -def apply_vars_ml(self): - for i in self._incpaths_lst: - if self.bytecode_env: - self.bytecode_env.append_value('OCAMLPATH', '-I %s' % i.srcpath(self.env)) - self.bytecode_env.append_value('OCAMLPATH', '-I %s' % i.bldpath(self.env)) - - if self.native_env: - self.native_env.append_value('OCAMLPATH', '-I %s' % i.bldpath(self.env)) - self.native_env.append_value('OCAMLPATH', '-I %s' % i.srcpath(self.env)) - - varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT'] - for name in self.uselib.split(): - for vname in varnames: - cnt = self.env[vname+'_'+name] - if cnt: - if self.bytecode_env: self.bytecode_env.append_value(vname, cnt) - if self.native_env: self.native_env.append_value(vname, cnt) - -def apply_link_ml(self): - - if self.bytecode_env: - linktask = Task.g_task_types['ocalink']('ocalink', self.bytecode_env) - linktask.bytecode = 1 - linktask.set_outputs(self.path.find_build(get_target_name(self, bytecode=1))) - linktask.obj = self - self.linktasks.append(linktask) - if self.native_env: - linktask = Task.g_task_types['ocalinkopt']('ocalinkopt', self.native_env) - linktask.set_outputs(self.path.find_build(get_target_name(self, bytecode=0))) - linktask.obj = self - self.linktasks.append(linktask) - - self.out_nodes += linktask.m_outputs - - # we produce a .o file to be used by gcc - if self.m_type == 'c_object': self.compiled_tasks.append(linktask) - -def mll_hook(self, node): - mll_task = self.create_task('ocamllex', self.native_env) - mll_task.set_inputs(node) - mll_task.set_outputs(node.change_ext('.ml')) - self.mlltasks.append(mll_task) - - self.allnodes.append(mll_task.m_outputs[0]) - -def mly_hook(self, node): - mly_task = self.create_task('ocamlyacc', self.native_env) - mly_task.set_inputs(node) - mly_task.set_outputs([node.change_ext('.ml'), node.change_ext('.mli')]) - self._mlytasks.append(mly_task) - self.allnodes.append(mly_task.m_outputs[0]) - - task = self.create_task('ocamlcmi', self.native_env) - task.set_inputs(mly_task.m_outputs[1]) - task.set_outputs(mly_task.m_outputs[1].change_ext('.cmi')) - -def mli_hook(self, node): - task = self.create_task('ocamlcmi', self.native_env) - task.set_inputs(node) - task.set_outputs(node.change_ext('.cmi')) - self.mlitasks.append(task) - -def mlc_hook(self, node): - task = self.create_task('ocamlcc', self.native_env) - task.set_inputs(node) - task.set_outputs(node.change_ext('.o')) - - self.out_nodes += task.m_outputs - -def ml_hook(self, node): - if self.native_env: - task = self.create_task('ocaml', self.native_env) - task.set_inputs(node) - task.set_outputs(node.change_ext('.cmx')) - task.m_scanner = g_caml_scanner - task.obj = self - task.incpaths = self._bld_incpaths_lst - self.native_tasks.append(task) - if self.bytecode_env: - task = self.create_task('ocaml', self.bytecode_env) - task.set_inputs(node) - task.m_scanner = g_caml_scanner - task.obj = self - task.bytecode = 1 - task.incpaths = self._bld_incpaths_lst - 
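        # the bytecode task turns foo.ml into foo.cmo (the native branch above
        # produced .cmx); both branches share g_caml_scanner, which reads the
        # module's 'open' statements to order the compile tasks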
task.set_outputs(node.change_ext('.cmo')) - self.bytecode_tasks.append(task) - -b = Task.simple_task_type -b('ocaml', '${OCAMLCOMP} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', prio=60) -b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', prio=40) -b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN', prio=60) -b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', prio=20) -b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bldbase(env)} ${SRC}', color='BLUE', prio=20) - -act = b('ocalink', '${OCALINK} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', prio=99) -act.may_start = new_may_start -act = b('ocalinkopt', '${OCALINK} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', prio=99) -act.may_start = new_may_start - - -def detect(conf): - opt = conf.find_program('ocamlopt', var='OCAMLOPT') - occ = conf.find_program('ocamlc', var='OCAMLC') - if (not opt) or (not occ): - fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH') - - conf.env['OCAMLC'] = occ - conf.env['OCAMLOPT'] = opt - conf.env['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX') - conf.env['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC') - conf.env['OCAMLFLAGS'] = '' - conf.env['OCALINK'] = '' - conf.env['OCAMLLIB'] = os.popen(conf.env['OCAMLC']+' -where').read().strip()+os.sep - conf.env['LIBPATH_OCAML'] = os.popen(conf.env['OCAMLC']+' -where').read().strip()+os.sep - conf.env['CPPPATH_OCAML'] = os.popen(conf.env['OCAMLC']+' -where').read().strip()+os.sep - conf.env['LIB_OCAML'] = 'camlrun' - conf.env['OCALINKFLAGS'] = '' - - -taskgen(apply_incpaths_ml) -feature('ocaml')(apply_incpaths_ml) -before('apply_vars_ml')(apply_incpaths_ml) -taskgen(apply_vars_ml) -feature('ocaml')(apply_vars_ml) -before('apply_core')(apply_vars_ml) -taskgen(apply_link_ml) -feature('ocaml')(apply_link_ml) -after('apply_core')(apply_link_ml) -extension(EXT_MLL)(mll_hook) -extension(EXT_MLY)(mly_hook) -extension(EXT_MLI)(mli_hook) -extension(EXT_MLC)(mlc_hook) -extension(EXT_ML)(ml_hook) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/osx.py a2jmidid-9/wafadmin/Tools/osx.py --- a2jmidid-8~dfsg0/wafadmin/Tools/osx.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/osx.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,120 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
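# [editor's sketch, not part of the original patch] detect() above shells out
# to `ocamlc -where` to locate the OCaml standard library and reuses the
# answer for OCAMLLIB, LIBPATH_OCAML and CPPPATH_OCAML.  The same probe with
# subprocess (assumes ocamlc is on PATH):
import os, subprocess

def ocaml_stdlib():
    out = subprocess.run(['ocamlc', '-where'],
                         capture_output=True, text=True, check=True)
    return out.stdout.strip() + os.sep  # e.g. '/usr/lib/ocaml/'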
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy 2008 - -"""MacOSX related tools - -To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute - obj.mac_app = True - -To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute: - obj.mac_bundle = True -""" - -import os, shutil -import TaskGen, Task -from TaskGen import taskgen, feature, after, before -from Params import error, debug, fatal, warning - -def create_task_macapp(self): - if self.m_type == 'program' and self.link_task: - apptask = self.create_task('macapp', self.env) - apptask.set_inputs(self.link_task.m_outputs) - apptask.set_outputs(self.link_task.m_outputs[0].change_ext('.app')) - self.m_apptask = apptask - -def apply_link_osx(self): - """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications - or use obj.mac_app = True to build specific targets as Mac apps""" - if self.env['MACAPP'] or getattr(self, 'mac_app', False): - self.create_task_macapp() - -def apply_bundle(self): - """the uselib system cannot modify a few things, use env['MACBUNDLE'] to force all shlibs into mac bundles - or use obj.mac_bundle = True for specific targets only""" - if not 'shlib' in self.features: return - if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False): - self.env['shlib_PATTERN'] = '%s.bundle' - uselib = self.to_list(self.uselib) - if not 'MACBUNDLE' in uselib: uselib.append('MACBUNDLE') - -def apply_bundle_remove_dynamiclib(self): - if not 'shlib' in self.features: return - if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False): - self.env["LINKFLAGS"].remove("-dynamiclib") - self.env.append_value("LINKFLAGS", "-bundle") - - - -app_dirs = ['Contents', os.path.join('Contents','MacOS'), os.path.join('Contents','Resources')] - -app_info = ''' -<?xml version="1.0" encoding="UTF-8"?> -<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd"> -<plist version="0.9"> -<dict> - <key>CFBundlePackageType</key> - <string>APPL</string> - <key>CFBundleGetInfoString</key> - <string>Created by Waf</string> - <key>CFBundleSignature</key> - <string>????</string> - <key>NOTE</key> - <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string> - <key>CFBundleExecutable</key> - <string>%s</string> -</dict> -</plist> -''' - -def app_build(task): - global app_dirs - env = task.env() - - i = 0 - for p in task.m_outputs: - srcfile = p.srcpath(env) - - debug("creating directories") - try: - os.mkdir(srcfile) - [os.makedirs(os.path.join(srcfile, d)) for d in app_dirs] - except (OSError, IOError): - pass - - # copy the program to the contents dir - srcprg = task.m_inputs[i].srcpath(env) - dst = os.path.join(srcfile, 'Contents', 'MacOS') - debug("copy %s to %s" % (srcprg, dst)) - shutil.copy(srcprg, dst) - - # create info.plist - debug("generate Info.plist") - # TODO: Support custom info.plist contents. - - f = file(os.path.join(srcfile, "Contents", "Info.plist"), "w") - f.write(app_info % os.path.basename(srcprg)) - f.close() - - i += 1 - - return 0 - -x = Task.task_type_from_func('macapp', vars=[], func=app_build) -x.prio = 300 - - -taskgen(create_task_macapp) -taskgen(apply_link_osx) -after('apply_link')(apply_link_osx) -feature('cc', 'cxx')(apply_link_osx) -taskgen(apply_bundle) -before('apply_link')(apply_bundle) -before('apply_lib_vars')(apply_bundle) -feature('cc', 'cxx')(apply_bundle) -taskgen(apply_bundle_remove_dynamiclib) -after('apply_link')(apply_bundle_remove_dynamiclib) -feature('cc', 'cxx')(apply_bundle_remove_dynamiclib) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/perl.py a2jmidid-9/wafadmin/Tools/perl.py --- a2jmidid-8~dfsg0/wafadmin/Tools/perl.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/perl.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,125 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#!
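# [editor's sketch, not part of the original patch] app_build() above builds
# the .app skeleton and writes the Info.plist; stripped of waf's task and
# node objects it boils down to this (make_app is a hypothetical name):
import os, shutil

PLIST = ('<?xml version="1.0" encoding="UTF-8"?>\n<plist version="0.9">\n'
         '<dict>\n\t<key>CFBundleExecutable</key>\n\t<string>%s</string>\n'
         '</dict>\n</plist>\n')

def make_app(program, app_dir):
    macos = os.path.join(app_dir, 'Contents', 'MacOS')
    os.makedirs(macos, exist_ok=True)
    os.makedirs(os.path.join(app_dir, 'Contents', 'Resources'), exist_ok=True)
    shutil.copy(program, macos)     # the bundle runs Contents/MacOS/<name>
    with open(os.path.join(app_dir, 'Contents', 'Info.plist'), 'w') as f:
        f.write(PLIST % os.path.basename(program))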
/usr/bin/env python -# encoding: utf-8 -# andersg at 0x63.nu 2007 - -import os -import pproc as subprocess -import Params, Task -from TaskGen import extension, taskgen, feature, before - -xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' -EXT_XS = ['.xs'] - -def init_pyext(self): - self.uselib = self.to_list(self.uselib) - if not 'PERL' in self.uselib: self.uselib.append('PERL') - if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT') - self.env['shlib_PATTERN'] = self.env['perlext_PATTERN'] - - -def xsubpp_file(self, node): - gentask = self.create_task('xsubpp') - gentask.set_inputs(node) - outnode = node.change_ext('.c') - gentask.set_outputs(outnode) - - self.allnodes.append(outnode) - -Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', prio=10) - -def check_perl_version(conf, minver=None): - """ - Checks if perl is installed. - - If installed the variable PERL will be set in environment. - - Perl binary can be overridden by --with-perl-binary config variable - - """ - res = True - - if not getattr(Params.g_options, 'perlbinary', None): - perl = conf.find_program("perl", var="PERL") - if not perl: - return False - else: - perl = Params.g_options.perlbinary - conf.env['PERL'] = perl - - version = os.popen(perl + " -e'printf \"%vd\", $^V'").read() - if not version: - res = False - version = "Unknown" - elif not minver is None: - ver = tuple(map(int, version.split("."))) - if ver < minver: - res = False - - if minver is None: - cver = "" - else: - cver = ".".join(map(str,minver)) - conf.check_message("perl", cver, res, version) - return res - -def check_perl_module(conf, module): - """ - Check if specified perlmodule is installed. - - Minimum version can be specified by specifying it after modulename - like this: - - conf.check_perl_module("Some::Module 2.92") - """ - cmd = [conf.env['PERL'], '-e', 'use %s' % module] - r = subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0 - conf.check_message("perl module %s" % module, "", r) - return r - -def check_perl_ext_devel(conf): - """ - Check for configuration needed to build perl extensions. - - Sets different xxx_PERLEXT variables in the environment. - - Also sets the ARCHDIR_PERL variable useful as installation path, - which can be overridden by --with-perl-archdir option. - """ - if not conf.env['PERL']: - return False - - perl = conf.env['PERL'] - - conf.env["LINKFLAGS_PERLEXT"] = os.popen(perl + " -MConfig -e'print $Config{lddlflags}'").read() - conf.env["CPPPATH_PERLEXT"] = os.popen(perl + " -MConfig -e'print \"$Config{archlib}/CORE\"'").read() - conf.env["CCFLAGS_PERLEXT"] = os.popen(perl + " -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'").read() - - conf.env["XSUBPP"] = os.popen(perl + " -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'").read() - conf.env["EXTUTILS_TYPEMAP"] = os.popen(perl + " -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'").read() - - if not getattr(Params.g_options, 'perlarchdir', None): - conf.env["ARCHDIR_PERL"] = os.popen(perl + " -MConfig -e'print $Config{sitearch}'").read() - else: - conf.env["ARCHDIR_PERL"] = getattr(Params.g_options, 'perlarchdir') - - conf.env['perlext_PATTERN'] = '%s.' 
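# [editor's sketch, not part of the original patch] check_perl_version()
# above asks perl itself for its version -- `printf "%vd", $^V` prints e.g.
# "5.36.0" -- and compares it as an integer tuple:
import subprocess

def perl_version_ok(minver=(5, 8, 0), perl='perl'):
    out = subprocess.run([perl, '-e', 'printf "%vd", $^V'],
                         capture_output=True, text=True)
    if out.returncode != 0 or not out.stdout:
        return False
    return tuple(int(x) for x in out.stdout.strip().split('.')) >= minver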
+ os.popen(perl + " -MConfig -e'print $Config{dlext}'").read() - - return True - -def detect(conf): - conf.hook(check_perl_version) - conf.hook(check_perl_ext_devel) - conf.hook(check_perl_module) - -def set_options(opt): - opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None) - opt.add_option("--with-perl-archdir", type="string", dest="perlarchdir", help = 'Specify directory where to install arch specific files', default=None) - - -taskgen(init_pyext) -before('apply_incpaths')(init_pyext) -feature('perlext')(init_pyext) -extension(EXT_XS)(xsubpp_file) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/preproc.py a2jmidid-9/wafadmin/Tools/preproc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/preproc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/preproc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,724 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2008 (ita) - -#C/C++ preprocessor for finding dependencies -#TODO: more varargs, pragma once - -import re, sys, os, string, types -if __name__ == '__main__': - sys.path = ['.', '..'] + sys.path -import Params -from Params import debug, error, warning -import traceback - -class PreprocError(Exception): - pass - -go_absolute = 0 -"set to 1 to track headers on files in /usr/include - else absolute paths are ignored" - -standard_includes = ['/usr/include'] -if sys.platform == "win32": - standard_includes = [] - -g_findall = 1 -'search harder for project includes' - -use_trigraphs = 0 -'apply the trigraph rules first' - -strict_quotes = 0 -"Keep <> for system includes (do not search for those includes)" - -g_optrans = { -'not':'!', -'and':'&&', -'bitand':'&', -'and_eq':'&=', -'or':'||', -'bitor':'|', -'or_eq':'|=', -'xor':'^', -'xor_eq':'^=', -'compl':'~', -} -"these ops are for c++, to reset, set an empty dict" - -# ignore #warning and #error -re_lines = re.compile(\ - '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$', - re.IGNORECASE | re.MULTILINE) -re_mac = re.compile("^[a-zA-Z_]\w*") -re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') -re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE) -re_nl = re.compile('\\\\\r*\n', re.MULTILINE) -re_cpp = re.compile(\ - r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""", - re.MULTILINE) -trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')] -chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39} - -NUM = 'i' -OP = 'O' -IDENT = 'T' -STR = 's' -CHAR = 'c' - -tok_types = [NUM, STR, IDENT, OP] -exp_types = [ - r"""0[xX](?P[a-fA-F0-9]+)(?P[uUlL]*)|L*?'(?P(\\.|[^\\'])+)'|(?P\d+)[Ee](?P[+-]*?\d+)(?P[fFlL]*)|(?P\d*\.\d+)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P\d+\.\d*)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P0*)(?P\d+)(?P[uUlL]*)""", - r'L?"([^"\\]|\\.)*"', - r'[a-zA-Z_]\w*', - r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]', -] -re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M) - -accepted = 'a' -ignored = 'i' -undefined = 'u' -skipped = 's' - -def repl(m): - s = m.group(1) - if s is not None: return ' ' - s = m.group(3) - if s is None: return '' - return s - -def filter_comments(filename): - # return a list of tuples : keyword, line - f = open(filename, "r") - code = 
f.read() - f.close() - if use_trigraphs: - for (a, b) in trig_def: code = code.split(a).join(b) - code = re_nl.sub('', code) - code = re_cpp.sub(repl, code) - return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)] - -prec = {} -# op -> number, needed for such expressions: #if 1 && 2 != 0 -ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ','] -for x in range(len(ops)): - syms = ops[x] - for u in syms.split(): - prec[u] = x - -def reduce_nums(val_1, val_2, val_op): - #print val_1, val_2, val_op - # pass two values, return a value - - # now perform the operation, make certain a and b are numeric - try: a = 0 + val_1 - except TypeError: a = int(val_1) - try: b = 0 + val_2 - except TypeError: b = int(val_2) - - d = val_op - if d == '%': c = a%b - elif d=='+': c = a+b - elif d=='-': c = a-b - elif d=='*': c = a*b - elif d=='/': c = a/b - elif d=='^': c = a^b - elif d=='|': c = a|b - elif d=='||': c = int(a or b) - elif d=='&': c = a&b - elif d=='&&': c = int(a and b) - elif d=='==': c = int(a == b) - elif d=='!=': c = int(a != b) - elif d=='<=': c = int(a <= b) - elif d=='<': c = int(a < b) - elif d=='>': c = int(a > b) - elif d=='>=': c = int(a >= b) - elif d=='^': c = int(a^b) - elif d=='<<': c = a<>': c = a>>b - else: c = 0 - return c - -def get_expr(lst, defs, ban): - - if not lst: return ([], [], []) - - (p, v) = lst[0] - if p == NUM: - return (p, v, lst[1:]) - - elif p == STR: - try: - (p2, v2) = lst[1] - if p2 == STR: return (p, v+v2, lst[2:]) - except IndexError: pass - - return (p, v, lst[1:]) - - elif p == OP: - if v in ['+', '-', '!', '~', '#']: - (p2, v2, lst2) = get_expr(lst[1:], defs, ban) - - if v == '#': - if p2 != IDENT: raise PreprocError, "ident expected %s" % str(lst) - return get_expr([(STR, v2)]+lst2, defs, ban) - - if p2 != NUM: raise PreprocError, "num expected %s" % str(lst) - - if v == '+': return (p2, v2, lst2) - elif v == '-': return (p2, - int(v2), lst2) - elif v == '!': return (p2, int(not int(v2)), lst2) - elif v == '~': return (p2, ~ int(v2), lst2) - - return (p2, v2, lst2) - - elif v == '(': - count_par = 0 - i = 0 - for _, v in lst: - if v == ')': - count_par -= 1 - if count_par == 0: break - elif v == '(': count_par += 1 - i += 1 - else: - raise PreprocError, "rparen expected %s" % str(lst) - - ret = process_tokens(lst[1:i], defs, ban) - if len(ret) == 1: - (p, v) = ret[0] - return (p, v, lst[i+1:]) - else: - #return (None, lst1, lst[i+1:]) - raise PreprocError, "cannot reduce %s" % str(lst) - - elif p == IDENT: - if len(lst)>1: - (p2, v2) = lst[1] - if v2 == "##": - # token pasting, reevaluate the identifier obtained - (p3, v3) = lst[2] - if p3 != IDENT and p3 != NUM and p3 != OP: - raise PreprocError, "%s: ident expected after '##'" % str(lst) - return get_expr([(p, v+v3)]+lst[3:], defs, ban) - - if v.lower() == 'defined': - (p2, v2) = lst[1] - off = 2 - if v2 == '(': - (p3, v3) = lst[2] - if p3 != IDENT: raise PreprocError, 'expected an identifier after a "defined("' - (p2, v2) = lst[3] - if v2 != ')': raise PreprocError, 'expected a ")" after a "defined(x"' - off = 4 - elif p2 != IDENT: - raise PreprocError, 'expected a "(" or an identifier after a defined' - - x = 0 - if v2 in defs: x = 1 - #return get_expr([(NUM, x)] + lst[off:], defs, ban) - return (NUM, x, lst[off:]) - - elif not v in defs or v in ban: - if "waf_include" in ban: return (p, v, lst[1:]) - else: return (NUM, 0, lst[1:]) - - # tokenize on demand - if type(defs[v]) is types.StringType: - v, k = extract_macro(defs[v]) - defs[v] = k - macro_def = defs[v] 
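# [editor's sketch, not part of the original patch] filter_comments() above
# works because re_cpp matches comments *and* string literals in one pass, so
# a "//" inside a string is never mistaken for a comment.  Self-contained:
import re

re_cpp = re.compile(
    r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",
    re.MULTILINE)

def strip_comments(code):
    def repl(m):
        if m.group(1) is not None:
            return ' '                        # /* ... */ collapses to a space
        s = m.group(3)
        return s if s is not None else ''     # keep code/strings, drop // ...
    return re_cpp.sub(repl, code)

assert strip_comments('a; /*x*/ s = "//kept"; // gone') == 'a;   s = "//kept"; '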
- - if not macro_def[0]: - # simple macro, substitute, and reevaluate - lst = macro_def[1] + lst[1:] - return get_expr(lst, defs, ban) - else: - # collect the arguments for the funcall - params = [] - i = 1 - p2, v2 = lst[i] - if p2 != OP or v2 != '(': raise PreprocError, "invalid function call '%s'" % v - - one_param = [] - count_paren = 0 - try: - while 1: - i += 1 - p2, v2 = lst[i] - - if p2 == OP and count_paren == 0: - if v2 == '(': - one_param.append((p2, v2)) - count_paren += 1 - elif v2 == ')': - if one_param: params.append(one_param) - lst = lst[i+1:] - break - elif v2 == ',': - if not one_param: raise PreprocError, "empty param in funcall %s" % p - params.append(one_param) - one_param = [] - else: - one_param.append((p2, v2)) - else: - one_param.append((p2, v2)) - if v2 == '(': count_paren += 1 - elif v2 == ')': count_paren -= 1 - - except IndexError, e: - #raise PreprocError, 'invalid function call %s: missing ")"' % p - raise - - # substitute the arguments within the define expression - accu = [] - table = macro_def[0] - for p2, v2 in macro_def[1]: - if p2 == IDENT and v2 in table: accu += params[table[v2]] - else: - if v2 == '__VA_ARGS__': - # first collect the tokens - va_toks = [] - st = len(macro_def[0]) - pt = len(params) - for x in params[pt-st+1:]: - va_toks.extend(x) - va_toks.append((OP, ',')) - if va_toks: va_toks.pop() # extra comma - if len(accu)>1: - (p3, v3) = accu[-1] - (p4, v4) = accu[-2] - if v3 == '##': - # remove the token paste - accu.pop() - if v4 == ',' and pt < st: - # remove the comma - accu.pop() - accu += va_toks - else: - accu.append((p2, v2)) - - return get_expr(accu + lst, defs, ban+[v]) - -def process_tokens(lst, defs, ban): - accu = [] - while lst: - p, v, nlst = get_expr(lst, defs, ban) - if p == NUM: - if not nlst: return [(p, v)] # finished - - op1, ov1 = nlst[0] - if op1 != OP: - raise PreprocError, "op expected %s" % str(lst) - - if ov1 == '?': - i = 0 - count_par = 0 - for _, k in nlst: - if k == ')': count_par -= 1 - elif k == '(': count_par += 1 - elif k == ':' and count_par == 0: break - i += 1 - else: raise PreprocError, "ending ':' expected %s" % str(lst) - - if reduce_nums(v, 0, '+'): lst = nlst[1:i] - else: lst = nlst[i+1:] - continue - - elif ov1 == ',': - lst = nlst[1:] - continue - - p2, v2, nlst = get_expr(nlst[1:], defs, ban) - if p2 != NUM: raise PreprocError, "num expected after op %s" % str(lst) - if nlst: - # op precedence - op3, ov3 = nlst[0] - if prec[ov3] < prec[ov1]: - #print "ov3", ov3, ov1 - # as needed - p4, v4, nlst2 = get_expr(nlst[1:], defs, ban) - v5 = reduce_nums(v2, v4, ov3) - lst = [(p, v), (op1, ov1), (NUM, v5)] + nlst2 - continue - - # no op precedence or empty list, reduce the first tokens - lst = [(NUM, reduce_nums(v, v2, ov1))] + nlst - continue - - elif p == STR: - if nlst: raise PreprocError, "sequence must terminate with a string %s" % str(nlst) - return [(p, v)] - - return (None, None, []) - -def eval_macro(lst, adefs): - # look at the result, and try to return a 0/1 result - ret = process_tokens(lst, adefs, []) - if not ret: raise PreprocError, "missing tokens to evaluate %s" % str(lst) - p, v = ret[0] - return int(v) != 0 - -class c_parser(object): - def __init__(self, nodepaths=None, strpaths=None, defines=None): - #self.lines = txt.split('\n') - self.lines = [] - - if defines is None: - self.defs = {} - else: - self.defs = dict(defines) # make a copy - self.state = [] - - self.env = None # needed for the variant when searching for files - - # include paths - if strpaths is None: - self.strpaths = [] 
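# [editor's sketch, not part of the original patch] reduce_nums() and
# process_tokens() above fold token lists into numbers using the prec table,
# so that "1 && 2 != 0" groups as "1 && (2 != 0)".  A compact
# precedence-climbing version over already-tokenized input (simplified
# operator set; the real table also covers &, |, ^, shifts and commas):
OPS = {'*': lambda a, b: a * b, '%': lambda a, b: a % b,
       '+': lambda a, b: a + b, '-': lambda a, b: a - b,
       '<': lambda a, b: int(a < b), '>': lambda a, b: int(a > b),
       '==': lambda a, b: int(a == b), '!=': lambda a, b: int(a != b),
       '&&': lambda a, b: int(bool(a) and bool(b)),
       '||': lambda a, b: int(bool(a) or bool(b))}
PREC = {'*': 0, '%': 0, '+': 1, '-': 1, '<': 2, '>': 2,
        '==': 3, '!=': 3, '&&': 4, '||': 4}

def eval_expr(toks, max_prec=99):
    val = toks.pop(0)                        # operand
    while toks and PREC[toks[0]] <= max_prec:
        op = toks.pop(0)
        val = OPS[op](val, eval_expr(toks, PREC[op] - 1))
    return val

assert eval_expr([1, '&&', 2, '!=', 0]) == 1
assert eval_expr([1, '+', 2, '*', 3]) == 7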
- else: - self.strpaths = strpaths - self.pathcontents = {} - - self.count_files = 0 - self.deps = [] - self.deps_paths = [] - - self.m_nodepaths = nodepaths or [] - #self.m_nodepaths.append(Params.g_build.m_root.find_dir('/usr/include')) - - self.m_nodes = [] - self.m_names = [] - - # file added - self.curfile = '' - self.ban_includes = [] - - # dynamic cache - try: - self.parse_cache = Params.g_build.parse_cache - except AttributeError: - Params.g_build.parse_cache = {} - self.parse_cache = Params.g_build.parse_cache - - def tryfind(self, filename): - self.curfile = filename - global g_findall - if self.m_nodepaths: - found = 0 - for n in self.m_nodepaths: - found = n.find_resource(filename) - if found: - break - # second pass for unreachable folders - if not found and g_findall: - lst = filename.split('/') - if len(lst)>1: - lst=lst[:-1] # take the folders only - try: cache = Params.g_build.preproc_cache - except AttributeError: Params.g_build.preproc_cache = cache = {} - key = hash( (str(self.m_nodepaths), str(lst)) ) - if not cache.get(key, None): - cache[key] = 1 - for n in self.m_nodepaths: - node = n.find_resource(filename) - if node: - found = node - break - if found: - self.m_nodes.append(found) - # Qt - if filename[-4:] != '.moc': self.addlines(found.abspath(self.env)) - if not found: - if not filename in self.m_names: - self.m_names.append(filename) - else: - found = 0 - for p in self.strpaths: - if not p in self.pathcontents.keys(): - self.pathcontents[p] = os.listdir(p) - if filename in self.pathcontents[p]: - #print "file %s found in path %s" % (filename, p) - np = os.path.join(p, filename) - # screw Qt two times - if filename[-4:] != '.moc': self.addlines(np) - self.deps_paths.append(np) - found = 1 - if not found: - pass - #error("could not find %s " % filename) - - def addlines(self, filepath): - self.count_files += 1 - if self.count_files > 30000: raise PreprocError, "recursion limit exceeded, bailing out" - pc = self.parse_cache - debug("reading file %r" % filepath, 'preproc') - - try: - lns = pc[filepath] - except KeyError: - pass - else: - self.lines = lns + self.lines - return - - try: - lines = filter_comments(filepath) - pc[filepath] = lines # memorize the lines filtered - self.lines = lines + self.lines - except IOError: - raise PreprocError, "could not read the file %s" % filepath - except Exception: - if Params.g_verbose > 0: - warning("parsing %s failed" % filepath) - traceback.print_exc() - - def start(self, node, env): - debug("scanning %s (in %s)" % (node.m_name, node.m_parent.m_name), 'preproc') - - self.env = env - variant = node.variant(env) - - self.addlines(node.abspath(env)) - if env['DEFLINES']: - self.lines = [('define', x) for x in env['DEFLINES']] + self.lines - - while self.lines: - (type, line) = self.lines.pop(0) - try: - self.process_line(type, line) - except Exception, ex: - if Params.g_verbose: - warning("line parsing failed (%s): %s" % (str(ex), line)) - traceback.print_exc() - - # debug only - def start_local(self, filename): - self.addlines(filename) - #print self.lines - while self.lines: - (type, line) = self.lines.pop(0) - try: - self.process_line(type, line) - except Exception, ex: - if Params.g_verbose: - warning("line parsing failed (%s): %s" % (str(ex), line)) - traceback.print_exc() - raise - - def process_line(self, token, line): - ve = Params.g_verbose - if ve: debug("line is %s - %s state is %s" % (token, line, self.state), 'preproc') - state = self.state - - # make certain we define the state if we are about to enter in an if 
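# [editor's sketch, not part of the original patch] tryfind() above caches
# one os.listdir() per search directory (self.pathcontents) so scanning many
# sources does not keep hitting the filesystem; the same idea standalone:
import os

class IncludeFinder:
    def __init__(self, paths):
        self.paths, self.contents = list(paths), {}
    def find(self, filename):
        for p in self.paths:
            if p not in self.contents:
                self.contents[p] = os.listdir(p)   # scanned once per path
            if filename in self.contents[p]:
                return os.path.join(p, filename)
        return None                                # unresolved, like m_names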
block - if token in ['ifdef', 'ifndef', 'if']: - state.append(undefined) - elif token == 'endif': - state.pop() - - # skip lines when in a dead 'if' branch, wait for the endif - if not token in ['else', 'elif', 'endif']: - if skipped in self.state or ignored in self.state: - return - - if token == 'if': - ret = eval_macro(tokenize(line), self.defs) - if ret: state[-1] = accepted - else: state[-1] = ignored - elif token == 'ifdef': - m = re_mac.search(line) - if m and m.group(0) in self.defs: state[-1] = accepted - else: state[-1] = ignored - elif token == 'ifndef': - m = re_mac.search(line) - if m and m.group(0) in self.defs: state[-1] = ignored - else: state[-1] = accepted - elif token == 'include' or token == 'import': - (type, inc) = extract_include(line, self.defs) - if inc in self.ban_includes: return - if token == 'import': self.ban_includes.append(inc) - if ve: debug("include found %s (%s) " % (inc, type), 'preproc') - if type == '"' or not strict_quotes: - if not inc in self.deps: - self.deps.append(inc) - self.tryfind(inc) - elif token == 'elif': - if state[-1] == accepted: - state[-1] = skipped - elif state[-1] == ignored: - if eval_macro(tokenize(line), self.defs): - state[-1] = accepted - elif token == 'else': - if state[-1] == accepted: state[-1] = skipped - elif state[-1] == ignored: state[-1] = accepted - elif token == 'define': - m = re_mac.search(line) - if m: - name = m.group(0) - if ve: debug("define %s %s" % (name, line), 'preproc') - self.defs[name] = line - else: - raise PreprocError, "invalid define line %s" % line - elif token == 'undef': - m = re_mac.search(line) - if m and m.group(0) in self.defs: - self.defs.__delitem__(m.group(0)) - #print "undef %s" % name - elif token == 'pragma': - if re_pragma_once.search(line.lower()): - self.ban_includes.append(self.curfile) - -def extract_macro(txt): - t = tokenize(txt) - if re_fun.search(txt): - p, name = t[0] - - p, v = t[1] - if p != OP: raise PreprocError, "expected open parenthesis" - - i = 1 - pindex = 0 - params = {} - wantident = 1 - - while 1: - i += 1 - p, v = t[i] - - if wantident: - if p == IDENT: - params[v] = pindex - pindex += 1 - elif v == '...': - pass - else: - raise PreprocError, "expected ident" - else: - if v == ',': - pass - elif v == ')': - break - elif v == '...': - raise PreprocError, "not implemented" - wantident = not wantident - - return (name, [params, t[i+1:]]) - else: - (p, v) = t[0] - return (v, [[], t[1:]]) - -re_include = re.compile('^\s*(<(?P.*)>|"(?P.*)")') -def extract_include(txt, defs): - m = re_include.search(txt) - if m: - if m.group('a'): return '<', m.group('a') - if m.group('b'): return '"', m.group('b') - - # perform preprocessing and look at the result, it must match an include - tokens = process_tokens(tokens, defs, ['waf_include']) - p, v = tokens[0] - if p != STR: raise PreprocError, "could not parse include %s" % txt - return ('"', v) - -def parse_char(txt): - if not txt: raise PreprocError, "attempted to parse a null char" - if txt[0] != '\\': - return ord(txt) - c = txt[1] - if c == 'x': - if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16) - return int(txt[2:], 16) - elif c.isdigit(): - if c == '0' and len(txt)==2: return 0 - for i in 3, 2, 1: - if len(txt) > i and txt[1:1+i].isdigit(): - return (1+i, int(txt[1:1+i], 8)) - else: - try: return chr_esc[c] - except KeyError: raise PreprocError, "could not parse char literal '%s'" % txt - -def tokenize(s): - ret = [] - for match in re_clexer.finditer(s): - m = match.group - for name in tok_types: - v = 
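# [editor's sketch, not part of the original patch] process_line() above is a
# stack machine: #if pushes a state, #elif/#else may flip it, #endif pops it,
# and a line survives only if no enclosing branch is dead.  The skeleton,
# with conditions pre-evaluated instead of going through eval_macro():
ACCEPTED, IGNORED, SKIPPED = 'a', 'i', 's'

class IfStack:
    def __init__(self):
        self.state = []
    def directive(self, token, cond=False):
        if token == 'if':
            self.state.append(ACCEPTED if cond else IGNORED)
        elif token == 'elif':
            if self.state[-1] == ACCEPTED:
                self.state[-1] = SKIPPED       # an earlier branch already won
            elif self.state[-1] == IGNORED and cond:
                self.state[-1] = ACCEPTED
        elif token == 'else':
            if self.state[-1] == ACCEPTED:
                self.state[-1] = SKIPPED
            elif self.state[-1] == IGNORED:
                self.state[-1] = ACCEPTED
        elif token == 'endif':
            self.state.pop()
    def alive(self):
        return not any(s in (IGNORED, SKIPPED) for s in self.state)

s = IfStack(); s.directive('if', False); assert not s.alive()
s.directive('else'); assert s.alive(); s.directive('endif')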
m(name) - if v: - if name == IDENT: - try: v = g_optrans[v]; name = OP - except KeyError: - # c++ specific - if v.lower() == "true": - v = 1 - name = NUM - elif v.lower() == "false": - v = 0 - name = NUM - elif name == NUM: - if m('oct'): v = int(v, 8) - elif m('hex'): v = int(m('hex'), 16) - elif m('n0'): v = m('n0') - else: - v = m('char') - if v: v = parse_char(v) - else: v = m('n2') or m('n4') - elif name == OP: - if v == '%:': v='#' - elif v == '%:%:': v='##' - - ret.append((name, v)) - break - return ret - -# quick test # -if __name__ == "__main__": - Params.g_verbose = 2 - Params.g_zones = ['preproc'] - class dum: - def __init__(self): - self.parse_cache = {} - Params.g_build = dum() - - try: arg = sys.argv[1] - except IndexError: arg = "file.c" - - paths = ['.'] - f = open(arg, "r"); txt = f.read(); f.close() - - m1 = [[], [(NUM, 1), (OP, '+'), (NUM, 2)]] - fun1 = [[(IDENT, 'x'), (IDENT, 'y')], [(IDENT, 'x'), (OP, '##'), (IDENT, 'y')]] - fun2 = [[(IDENT, 'x'), (IDENT, 'y')], [(IDENT, 'x'), (OP, '*'), (IDENT, 'y')]] - - def test(x): - y = process_tokens(tokenize(x), {'m1':m1, 'fun1':fun1, 'fun2':fun2}, []) - #print x, y - - test("0&&2<3") - test("(5>1)*6") - test("1+2+((3+4)+5)+6==(6*7)/2==1*-1*-1") - test("1,2,3*9,9") - test("1?77:88") - test("0?77:88") - test("1?1,(0?5:9):3,4") - test("defined inex") - test("defined(inex)") - test("m1*3") - test("7*m1*3") - test("fun1(m,1)") - test("fun2(2, fun1(m, 1))") - #test("foo##.##h") - - gruik = c_parser(strpaths = paths) - gruik.start_local(arg) - print "we have found the following dependencies" - print gruik.deps - print gruik.deps_paths - - #f = open(arg, "r") - #txt = f.read() - #f.close() - #print tokenize(txt) - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/python.py a2jmidid-9/wafadmin/Tools/python.py --- a2jmidid-8~dfsg0/wafadmin/Tools/python.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/python.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,387 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2007 (ita) -# Gustavo Carneiro (gjc), 2007 - -"Python support" - -import os, sys -import TaskGen, Utils, Params, Common, Utils, Runner -from TaskGen import extension, taskgen, before, after, feature -import pproc as subprocess - -EXT_PY = ['.py'] - -def init_pyext(self): - self.inst_var_default = 'PYTHONDIR' - self.inst_dir_default = '' - self.uselib = self.to_list(self.uselib) - if not 'PYEXT' in self.uselib: - self.uselib.append('PYEXT') - self.env['MACBUNDLE'] = True - -def pyext_shlib_ext(self): - # override shlib_PATTERN set by the osx module - self.env['shlib_PATTERN'] = self.env['pyext_PATTERN'] - - -def init_pyembed(self): - self.uselib = self.to_list(self.uselib) - if not 'PYEMBED' in self.uselib: - self.uselib.append('PYEMBED') - -def process_py(self, node): - pass - -# FIXME in theory, we should absolutely avoid subclasses like this -class py_taskgen(TaskGen.task_gen): - def __init__(self, env=None): - TaskGen.task_gen.__init__(self) - - self.inst_var_default = 'PYTHONDIR' - self.inst_dir_default = '' - self.chmod = 0644 - - def install(self): - files_to_install = [] - for filename in self.to_list(self.source): - node = self.path.find_source(filename) - if node is not None: - files_to_install.append(node.abspath()) - else: - node = self.path.find_build(filename) - if node is None: - Params.fatal("Cannot install file %s: not found in %s" - % (filename, self.path)) - else: - files_to_install.append(node.abspath(self.env)) - - installed_files = Common.install_files( - self.inst_var, self.inst_dir, files_to_install, - self.env, self.chmod) - - if not installed_files: - return - - if Params.g_commands['uninstall']: - print "* removing byte compiled python files" - for fname in installed_files: - try: - os.remove(fname + 'c') - except OSError: - pass - try: - os.remove(fname + 'o') - except OSError: - pass - else: - if self.env['PYC'] or self.env['PYO']: - print "* byte compiling python files" - - if self.env['PYC']: - program = (""" -import sys, py_compile -for pyfile in sys.argv[1:]: - py_compile.compile(pyfile, pyfile + 'c') -""") - argv = [self.env['PYTHON'], "-c", program ] - argv.extend(installed_files) - retval = subprocess.Popen(argv).wait() - if retval: - Params.fatal("bytecode compilation failed") - - - if self.env['PYO']: - program = (""" -import sys, py_compile -for pyfile in sys.argv[1:]: - py_compile.compile(pyfile, pyfile + 'o') -""") - argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], "-c", program ] - argv.extend(installed_files) - retval = subprocess.Popen(argv).wait() - if retval: - Params.fatal("bytecode compilation failed") - - -def _get_python_variables(python_exe, variables, imports=['import sys']): - """Run a python interpreter and print some variables""" - program = list(imports) - program.append('') - for v in variables: - program.append("print repr(%s)" % v) - proc = subprocess.Popen([python_exe, "-c", '\n'.join(program)], - stdout=subprocess.PIPE) - output = proc.communicate()[0].split("\n") - if proc.returncode: - if Params.g_verbose: - Params.warning("Python program to extract python configuration variables failed:\n%s" - % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)])) - raise ValueError - return_values = [] - for s in output: - s = s.strip() - if not s: - continue - if s == 'None': - return_values.append(None) - elif s[0] == "'" and s[-1] == "'": - return_values.append(s[1:-1]) - elif s[0].isdigit(): - return_values.append(int(s)) - else: break - return 
return_values - -def check_python_headers(conf): - """Check for headers and libraries necessary to extend or embed python. - - If successful, xxx_PYEXT and xxx_PYEMBED variables are defined in the - environment (for uselib). PYEXT should be used for compiling - python extensions, while PYEMBED should be used by programs that - need to embed a python interpreter. - - Note: this test requires that check_python_version was previously - executed and successful.""" - - env = conf.env - python = env['PYTHON'] - assert python, ("python is %r !" % (python,)) - - ## On Mac OSX we need to use mac bundles for python plugins - import checks - if checks.detect_platform(None) == 'darwin': - conf.check_tool('osx') - - try: - # Get some python configuration variables using distutils - v = 'prefix SO SYSLIBS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED'.split() - (python_prefix, python_SO, python_SYSLIBS, python_SHLIBS, - python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED) = \ - _get_python_variables(python, ["get_config_var('%s')" % x for x in v], - ['from distutils.sysconfig import get_config_var']) - except ValueError: - conf.fatal("Python development headers not found (-v for details).") - - Runner.print_log("""Configuration returned from %r: -python_prefix = %r -python_SO = %r -python_SYSLIBS = %r -python_SHLIBS = %r -python_LIBDIR = %r -python_LIBPL = %r -INCLUDEPY = %r -Py_ENABLE_SHARED = %r -""" % (python, python_prefix, python_SO, python_SYSLIBS, python_SHLIBS, - python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED)) - - env['pyext_PATTERN'] = '%s'+python_SO - - # Check for python libraries for embedding - if python_SYSLIBS is not None: - for lib in python_SYSLIBS.split(): - if lib.startswith('-l'): - lib = lib[2:] # strip '-l' - env.append_value('LIB_PYEMBED', lib) - if python_SHLIBS is not None: - for lib in python_SHLIBS.split(): - if lib.startswith('-l'): - lib = lib[2:] # strip '-l' - env.append_value('LIB_PYEMBED', lib) - lib = conf.create_library_configurator() - lib.name = 'python' + env['PYTHON_VERSION'] - lib.uselib = 'PYEMBED' - lib.code = ''' -#ifdef __cplusplus -extern "C" { -#endif - void Py_Initialize(void); - void Py_Finalize(void); -#ifdef __cplusplus -} -#endif -int main(int argc, char *argv[]) { Py_Initialize(); Py_Finalize(); return 0; } -''' - if python_LIBDIR is not None: - lib.path = [python_LIBDIR] - result = lib.run() - else: - result = 0 - - ## try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib) - if not result: - if python_LIBPL is not None: - lib.path = [python_LIBPL] - result = lib.run() - else: - result = 0 - - ## try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32) - if not result: - lib.path = [os.path.join(python_prefix, "libs")] - lib.name = 'python' + env['PYTHON_VERSION'].replace('.', '') - result = lib.run() - - if result: - env['LIBPATH_PYEMBED'] = lib.path - env.append_value('LIB_PYEMBED', lib.name) - - ## under certain conditions, python extensions must link to - ## python libraries, not just python embedding programs. - if (sys.platform == 'win32' or sys.platform.startswith('os2') - or sys.platform == 'darwin' or Py_ENABLE_SHARED): - env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED'] - env['LIB_PYEXT'] = env['LIB_PYEMBED'] - - # We check that pythonX.Y-config exists, and if it exists we - # use it to get only the includes, else fall back to distutils. 
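# [editor's sketch, not part of the original patch] _get_python_variables()
# above runs a second interpreter and repr()-prints each distutils config
# variable, then parses the output by hand.  A modern equivalent trades the
# repr parsing for json and sysconfig:
import json, subprocess, sys

def get_python_vars(python_exe, names):
    prog = ('import json, sysconfig; '
            'print(json.dumps([sysconfig.get_config_var(n) for n in %r]))'
            % (names,))
    out = subprocess.run([python_exe, '-c', prog],
                         capture_output=True, text=True, check=True)
    return json.loads(out.stdout)

print(get_python_vars(sys.executable, ['prefix', 'LIBDIR']))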
- python_config = conf.find_program( - 'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])), - var='PYTHON_CONFIG') - if python_config: - includes = [] - for incstr in os.popen("%s %s --includes" % (python, python_config)).readline().strip().split(): - # strip the -I or /I - if (incstr.startswith('-I') - or incstr.startswith('/I')): - incstr = incstr[2:] - # append include path, unless already given - if incstr not in includes: - includes.append(incstr) - env['CPPPATH_PYEXT'] = list(includes) - env['CPPPATH_PYEMBED'] = list(includes) - else: - env['CPPPATH_PYEXT'] = [INCLUDEPY] - env['CPPPATH_PYEMBED'] = [INCLUDEPY] - - # Code using the Python API needs to be compiled with -fno-strict-aliasing - if env['CC']: - version = os.popen("%s --version" % env['CC']).readline() - if '(GCC)' in version: - env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing') - env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing') - if env['CXX']: - version = os.popen("%s --version" % env['CXX']).readline() - if '(GCC)' in version: - env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing') - env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing') - - # Test to see if it compiles - header = conf.create_header_configurator() - header.name = 'Python.h' - header.define = 'HAVE_PYTHON_H' - header.uselib = 'PYEXT' - header.code = "#include <Python.h>\nint main(int argc, char *argv[]) { Py_Initialize(); Py_Finalize(); return 0; }" - result = header.run() - if not result: - conf.fatal("Python development headers not found.") - -def check_python_version(conf, minver=None): - """ - Check if the python interpreter is found matching a given minimum version. - minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver. - - If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' - (eg. '2.4') of the actual python version found, and PYTHONDIR is - defined, pointing to the site-packages directory appropriate for - this python version, where modules/packages/extensions should be - installed. - """ - assert minver is None or isinstance(minver, tuple) - python = conf.env['PYTHON'] - assert python, ("python is %r !"
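# [editor's sketch, not part of the original patch] the python-config branch
# above splits the `--includes` output, strips the -I (or MSVC /I) prefix and
# de-duplicates while preserving order:
def parse_includes(flags):
    includes = []
    for inc in flags.split():
        if inc[:2] in ('-I', '/I'):
            inc = inc[2:]
        if inc not in includes:
            includes.append(inc)
    return includes

assert parse_includes('-I/usr/include/python2.5 -I/usr/include/python2.5') == \
       ['/usr/include/python2.5']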
% (python,)) - - # Get python version string - cmd = [python, "-c", "import sys\nfor x in sys.version_info: print str(x)"] - Params.debug("Running python command %r" % cmd, 'python') - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) - lines = proc.communicate()[0].split() - assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines) - pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4])) - - # compare python version with the minimum required - result = (minver is None) or (pyver_tuple >= minver) - - if result: - # define useful environment variables - pyver = '.'.join([str(x) for x in pyver_tuple[:2]]) - conf.env['PYTHON_VERSION'] = pyver - - if 'PYTHONDIR' in os.environ: - pydir = os.environ['PYTHONDIR'] - else: - if sys.platform == 'win32': - (python_LIBDEST,) = \ - _get_python_variables(python, ["get_config_var('LIBDEST')"], - ['from distutils.sysconfig import get_config_var']) - else: - python_LIBDEST = None - if python_LIBDEST is None: - if conf.env['LIBDIR']: - python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver) - else: - python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver) - pydir = os.path.join(python_LIBDEST, "site-packages") - - if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist - conf.define('PYTHONDIR', pydir) - conf.env['PYTHONDIR'] = pydir - - # Feedback - pyver_full = '.'.join(map(str, pyver_tuple[:3])) - if minver is None: - conf.check_message_custom('Python version', '', pyver_full) - else: - minver_str = '.'.join(map(str, minver)) - conf.check_message('Python version', ">= %s" % (minver_str,), result, option=pyver_full) - - if not result: - conf.fatal("Python too old.") - -def check_python_module(conf, module_name): - """ - Check if the selected python interpreter can import the given python module. - """ - result = not subprocess.Popen([conf.env['PYTHON'], "-c", "import %s" % module_name], - stderr=subprocess.PIPE, stdout=subprocess.PIPE).wait() - conf.check_message('Python module', module_name, result) - if not result: - conf.fatal("Python module not found.") - -def detect(conf): - python = conf.find_program('python', var='PYTHON') - if not python: return - - v = conf.env - - v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"' - v['PYFLAGS'] = '' - v['PYFLAGS_OPT'] = '-O' - - v['PYC'] = getattr(Params.g_options, 'pyc', 1) - v['PYO'] = getattr(Params.g_options, 'pyo', 1) - - conf.hook(check_python_version) - conf.hook(check_python_headers) - conf.hook(check_python_module) - -def set_options(opt): - opt.add_option('--nopyc', action = 'store_false', default = 1, help = 'no pyc files (configuration)', dest = 'pyc') - opt.add_option('--nopyo', action = 'store_false', default = 1, help = 'no pyo files (configuration)', dest = 'pyo') - - -taskgen(init_pyext) -before('apply_incpaths')(init_pyext) -feature('pyext')(init_pyext) -before('apply_bundle')(init_pyext) -taskgen(pyext_shlib_ext) -before('apply_link')(pyext_shlib_ext) -before('apply_lib_vars')(pyext_shlib_ext) -after('apply_bundle')(pyext_shlib_ext) -feature('pyext')(pyext_shlib_ext) -taskgen(init_pyembed) -before('apply_incpaths')(init_pyembed) -feature('pyembed')(init_pyembed) -extension(EXT_PY)(process_py) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/qt4.py a2jmidid-9/wafadmin/Tools/qt4.py --- a2jmidid-8~dfsg0/wafadmin/Tools/qt4.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/qt4.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,529 +0,0 @@ -#! 
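# [editor's sketch, not part of the original patch] check_python_module()
# above reduces to "does `python -c 'import x'` exit with status 0":
import subprocess, sys

def has_module(python_exe, name):
    return subprocess.run([python_exe, '-c', 'import %s' % name],
                          capture_output=True).returncode == 0

assert has_module(sys.executable, 'os')
assert not has_module(sys.executable, 'no_such_module_xyz')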
/usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -""" -Qt4 support - -If QT4_ROOT is given (absolute path), the configuration will look in it first - -This module also demonstrates how to add tasks dynamically (when the build has started) -""" - -try: - from xml.sax import make_parser - from xml.sax.handler import ContentHandler -except ImportError: - has_xml = False - ContentHandler = object -else: - has_xml = True - -import os, sys -import ccroot, cxx -import Params, TaskGen, Task, Utils, Runner, Scan -from TaskGen import taskgen, feature, after, extension -from Params import error, fatal - -MOC_H = ['.h', '.hpp', '.hxx', '.hh'] -EXT_RCC = ['.qrc'] -EXT_UI = ['.ui'] -EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C'] - -class MTask(Task.Task): - "A cpp task that may create a moc task dynamically" - def __init__(self, action_name, env, parent, priority=10): - Task.Task.__init__(self, action_name, env, priority) - self.moc_done = 0 - self.parent = parent - - def may_start(self): - - if self.moc_done: - # if there is a moc task, delay the computation of the file signature - for t in self.get_run_after(): - if not t.m_hasrun: - return 0 - # the moc file enters in the dependency calculation - # so we need to recompute the signature when the moc file is present - self.signature() - return Task.Task.may_start(self) - else: - self.add_moc_tasks() - return 0 - - def add_moc_tasks(self): - - tree = Params.g_build - parn = self.parent - node = self.m_inputs[0] - - # to know if there is a moc file to create - try: - self.signature() - except Scan.ScannerError: - pass - else: - # remove the signature, it must be recomputed - delattr(self, 'sign_all') - - moctasks=[] - mocfiles=[] - variant = node.variant(parn.env) - try: - tmp_lst = tree.raw_deps[variant][node.id] - tree.raw_deps[variant][node.id] = [] - except KeyError: - tmp_lst = [] - for d in tmp_lst: - if not d.endswith('.moc'): continue - # paranoid check - if d in mocfiles: - error("paranoia owns") - continue - # process that base.moc only once - mocfiles.append(d) - - # find the extension - this search is done only once - ext = '' - try: ext = Params.g_options.qt_header_ext - except AttributeError: pass - - if not ext: - base2 = d[:-4] - path = node.m_parent.srcpath(parn.env) - for i in MOC_H: - try: - # TODO we could use find_resource - os.stat(os.path.join(path, base2+i)) - except OSError: - pass - else: - ext = i - break - if not ext: fatal("no header found for %s which is a moc file" % str(d)) - - # next time we will not search for the extension (look at the 'for' loop below) - h_node = node.m_parent.find_resource(base2+i) - m_node = h_node.change_ext('.moc') - tree.node_deps[variant][m_node.id] = (h_node,) - - # create the task - task = Task.g_task_types['moc']('moc', parn.env, normal=0) - task.set_inputs(h_node) - task.set_outputs(m_node) - - generator = Params.g_build.generator - generator.outstanding.insert(0, task) - generator.total += 1 - - moctasks.append(task) - - # remove raw deps except the moc files to save space (optimization) - tmp_lst = tree.raw_deps[variant][node.id] = mocfiles - - # look at the file inputs, it is set right above - lst = tree.node_deps[variant].get(node.id, ()) - for d in lst: - name = d.m_name - if name.endswith('.moc'): - task = Task.g_task_types['moc']('moc', parn.env, normal=0) - task.set_inputs(tree.node_deps[variant][d.id][0]) # 1st element in a tuple - task.set_outputs(d) - - generator = Params.g_build.generator - generator.outstanding.insert(0, 
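# [editor's sketch, not part of the original patch] add_moc_tasks() above
# resolves each recorded "foo.moc" dependency back to the header it is
# generated from by probing the MOC_H extensions in order; standalone:
import os

MOC_H = ['.h', '.hpp', '.hxx', '.hh']

def header_for_moc(moc_name, search_dir):
    base = moc_name[:-len('.moc')]
    for ext in MOC_H:
        cand = os.path.join(search_dir, base + ext)
        if os.path.exists(cand):       # the patch uses os.stat() for this
            return cand
    raise LookupError('no header found for %s' % moc_name)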
task) - generator.total += 1 - - moctasks.append(task) - - # simple scheduler dependency: run the moc task before others - self.m_run_after = moctasks - self.moc_done = 1 - - run = Task.g_task_types['cxx'].__dict__["run"] - -def translation_update(task): - outs=[a.abspath(task.env) for a in task.m_outputs] - outs=" ".join(outs) - lupdate = task.env['QT_LUPDATE'] - - for x in task.m_inputs: - file = x.abspath(task.env) - cmd = "%s %s -ts %s" % (lupdate, file, outs) - Params.pprint('BLUE', cmd) - Runner.exec_command(cmd) - -class XMLHandler(ContentHandler): - def __init__(self): - self.buf = [] - self.files = [] - def startElement(self, name, attrs): - if name == 'file': - self.buf = [] - def endElement(self, name): - if name == 'file': - self.files.append(''.join(self.buf)) - def characters(self, cars): - self.buf.append(cars) - -class rcc_scanner(Scan.scanner): - "scanner for d files" - def __init__(self): - Scan.scanner.__init__(self) - - def scan(self, task, node): - "add the dependency on the files referenced in the qrc" - parser = make_parser() - curHandler = XMLHandler() - parser.setContentHandler(curHandler) - fi = open(task.m_inputs[0].abspath(task.env())) - parser.parse(fi) - fi.close() - - nodes = [] - names = [] - root = task.m_inputs[0].m_parent - for x in curHandler.files: - x = x.encode('utf8') - nd = root.find_resource(x) - if nd: nodes.append(nd) - else: names.append(x) - - return (nodes, names) - -if has_xml: - g_rcc_scanner = rcc_scanner() - -def create_rcc_task(self, node): - "hook for rcc files" - - rcnode = node.change_ext('_rc.cpp') - - rcctask = self.create_task('rcc', self.env) - rcctask.m_inputs = [node] - rcctask.m_outputs = [rcnode] - - if has_xml: - rcctask.m_scanner = g_rcc_scanner - - cpptask = self.create_task('cxx', self.env) - cpptask.m_inputs = [rcnode] - cpptask.m_outputs = [rcnode.change_ext('.o')] - - self.compiled_tasks.append(cpptask) - - return cpptask - -def create_uic_task(self, node): - "hook for uic tasks" - uictask = self.create_task('ui4', self.env) - uictask.m_inputs = [node] - uictask.m_outputs = [node.change_ext('.h')] - -class qt4_taskgen(cxx.cpp_taskgen): - def __init__(self, *kw): - cxx.cpp_taskgen.__init__(self, *kw) - self.link_task = None - self.lang = '' - self.langname = '' - self.update = 0 - self.features.append('qt4') - -def apply_qt4(self): - if self.lang: - lst=[] - trans=[] - for l in self.to_list(self.lang): - t = Task.g_task_types['ts2qm']('ts2qm', self.env, 4) - t.set_inputs(self.path.find_resource(l+'.ts')) - t.set_outputs(t.m_inputs[0].change_ext('.qm')) - lst.append(t.m_outputs[0]) - - if self.update: - trans.append(t.m_inputs[0]) - - if self.update and Params.g_options.trans_qt4: - # we need the cpp files given, except the rcc task we create after - u = Task.TaskCmd(translation_update, self.env, 2) - u.m_inputs = [a.m_inputs[0] for a in self.compiled_tasks] - u.m_outputs = trans - - if self.langname: - t = Task.g_task_types['qm2rcc']('qm2rcc', self.env, 50) - t.set_inputs(lst) - t.set_outputs(self.path.find_build(self.langname+'.qrc')) - t.path = self.path - k = create_rcc_task(self, t.m_outputs[0]) - self.link_task.m_inputs.append(k.m_outputs[0]) - - lst = [] - for flag in self.to_list(self.env['CXXFLAGS']): - if len(flag) < 2: continue - if flag[0:2] == '-D' or flag[0:2] == '-I': - lst.append(flag) - self.env['MOC_FLAGS'] = lst - -def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]): - "the .ts files are added to self.lang" - lst=[] - excludes = self.to_list(excludes) - #make sure dirnames is a list helps with 
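# [editor's sketch, not part of the original patch] rcc_scanner/XMLHandler
# above collect every <file> entry of a .qrc resource file as a dependency;
# the same handler works standalone:
from xml.sax import parseString
from xml.sax.handler import ContentHandler

class QrcHandler(ContentHandler):
    def __init__(self):
        self.buf, self.files = [], []
    def startElement(self, name, attrs):
        if name == 'file':
            self.buf = []
    def endElement(self, name):
        if name == 'file':
            self.files.append(''.join(self.buf))
    def characters(self, data):
        self.buf.append(data)

h = QrcHandler()
parseString(b'<RCC><qresource><file>a2j.png</file></qresource></RCC>', h)
assert h.files == ['a2j.png']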
dirnames with spaces - dirnames = self.to_list(dirnames) - - ext_lst = exts or self.mappings.keys() + TaskGen.task_gen.mappings.keys() - - for name in dirnames: - anode = self.path.find_dir(name) - Params.g_build.rescan(anode) - - for name in Params.g_build.cache_dir_contents[anode.id]: - (base, ext) = os.path.splitext(name) - if ext in ext_lst: - if not name in lst: - if name in excludes: continue - lst.append((anode.relpath(self.path) or '.') + '/' + name) - elif ext == '.ts': - self.lang += ' '+base - - lst.sort() - self.source = self.source+' '+(" ".join(lst)) -setattr(qt4_taskgen, 'find_sources_in_dirs', find_sources_in_dirs) - -def cxx_hook(self, node): - # create the compilation task: cpp or cc - task = MTask('cxx', self.env, self) - self.m_tasks.append(task) - try: obj_ext = self.obj_ext - except AttributeError: obj_ext = '_%d.o' % self.idx - - task.m_scanner = ccroot.g_c_scanner - task.defines = self.scanner_defines - - task.m_inputs = [node] - task.m_outputs = [node.change_ext(obj_ext)] - self.compiled_tasks.append(task) - -def process_qm2rcc(task): - outfile = task.m_outputs[0].abspath(task.env()) - f = open(outfile, 'w') - f.write('\n\n') - for k in task.m_inputs: - f.write(' ') - #f.write(k.m_name) - f.write(k.relpath(task.path)) - f.write('\n') - f.write('\n') - f.close() - -b = Task.simple_task_type -b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], prio=100) -b('rcc', '${QT_RCC} -name ${SRC[0].m_name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', prio=60) -b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', prio=60) -b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', prio=40) - -Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', prio=50) - -def detect_qt4(conf): - env = conf.env - opt = Params.g_options - - qtlibs = getattr(opt, 'qtlibs', '') - qtincludes = getattr(opt, 'qtincludes', '') - qtbin = getattr(opt, 'qtbin', '') - useframework = getattr(opt, 'use_qt4_osxframework', True) - qtdir = getattr(opt, 'qtdir', '') - - if not qtdir: qtdir = os.environ.get('QT4_ROOT', '') - - if not qtdir: - try: - lst = os.listdir('/usr/local/Trolltech/') - lst.sort() - lst.reverse() - qtdir = '/usr/local/Trolltech/%s/' % lst[0] - - except OSError: - pass - - if not qtdir: - try: - path = os.environ['PATH'].split(':') - for qmk in ['qmake-qt4', 'qmake4', 'qmake']: - qmake = conf.find_program(qmk, path) - if qmake: - version = os.popen(qmake+" -query QT_VERSION").read().strip().split('.') - if version[0] == "4": - qtincludes = os.popen(qmake+" -query QT_INSTALL_HEADERS").read().strip() - qtdir = os.popen(qmake + " -query QT_INSTALL_PREFIX").read().strip()+"/" - qtbin = os.popen(qmake + " -query QT_INSTALL_BINS").read().strip()+"/" - break - except OSError: - pass - - # check for the qt includes first - if not qtincludes: qtincludes = qtdir + 'include/' - env['QTINCLUDEPATH']=qtincludes - - lst = [qtincludes, '/usr/share/qt4/include/', '/opt/qt4/include'] - test = conf.create_header_enumerator() - test.name = 'QtGui/QFont' - test.path = lst - test.mandatory = 1 - ret = test.run() - - - # check for the qtbinaries - if not qtbin: qtbin = qtdir + 'bin/' - - binpath = [qtbin, '/usr/share/qt4/bin/'] + os.environ['PATH'].split(':') - def find_bin(lst, var): - for f in lst: - ret = conf.find_program(f, path_list=binpath) - if ret: - env[var]=ret - break - - find_bin(['uic-qt3', 'uic3'], 'QT_UIC3') - - find_bin(['uic-qt4', 'uic'], 'QT_UIC') - version = os.popen(env['QT_UIC'] 
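# [editor's sketch, not part of the original patch] detect_qt4() above
# interrogates qmake for QT_VERSION, QT_INSTALL_HEADERS, QT_INSTALL_PREFIX
# and QT_INSTALL_BINS; those are real `qmake -query` properties.  Minimal
# probe, assuming some qmake is on PATH:
import subprocess

def qmake_query(prop, qmake='qmake'):
    out = subprocess.run([qmake, '-query', prop],
                         capture_output=True, text=True, check=True)
    return out.stdout.strip()

# usage sketch: accept this Qt only if qmake_query('QT_VERSION') starts '4.'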
+ " -version 2>&1").read().strip() - version = version.replace('Qt User Interface Compiler ','') - version = version.replace('User Interface Compiler for Qt', '') - if version.find(" 3.") != -1: - conf.check_message('uic version', '(too old)', 0, option='(%s)'%version) - sys.exit(1) - conf.check_message('uic version', '', 1, option='(%s)'%version) - - find_bin(['moc-qt4', 'moc'], 'QT_MOC') - find_bin(['rcc'], 'QT_RCC') - find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE') - find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE') - - env['UIC3_ST']= '%s -o %s' - env['UIC_ST'] = '%s -o %s' - env['MOC_ST'] = '-o' - env['QT_LRELEASE_FLAGS'] = ['-silent'] - - # check for the qt libraries - if not qtlibs: qtlibs = qtdir + 'lib' - - vars = "Qt3Support QtCore QtGui QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit".split() - - framework_ok = False - if sys.platform == "darwin" and useframework: - for i in vars: - e = conf.create_framework_configurator() - e.path = [qtlibs] - e.name = i - e.remove_dot_h = 1 - e.run() - - if not i == 'QtCore': - # strip -F flag so it don't get reduant - for r in env['CCFLAGS_' + i.upper()]: - if r.startswith('-F'): - env['CCFLAGS_' + i.upper()].remove(r) - break - - incflag = '-I%s' % os.path.join(qtincludes, i) - if not incflag in env["CCFLAGS_" + i.upper ()]: - env['CCFLAGS_' + i.upper ()] += [incflag] - if not incflag in env["CXXFLAGS_" + i.upper ()]: - env['CXXFLAGS_' + i.upper ()] += [incflag] - - # now we add some static depends. - if conf.is_defined("HAVE_QTOPENGL"): - if not '-framework OpenGL' in env["LINKFLAGS_QTOPENGL"]: - env["LINKFLAGS_QTOPENGL"] += ['-framework OpenGL'] - - if conf.is_defined("HAVE_QTGUI"): - if not '-framework AppKit' in env["LINKFLAGS_QTGUI"]: - env["LINKFLAGS_QTGUI"] += ['-framework AppKit'] - if not '-framework ApplicationServices' in env["LINKFLAGS_QTGUI"]: - env["LINKFLAGS_QTGUI"] += ['-framework ApplicationServices'] - - framework_ok = True - - if not framework_ok: # framework_ok is false either when the platform isn't OSX, Qt4 shall not be used as framework, or Qt4 could not be found as framework - vars_debug = [a+'_debug' for a in vars] - - for i in vars_debug+vars: - #conf.check_pkg(i, pkgpath=qtlibs) - pkgconf = conf.create_pkgconfig_configurator() - pkgconf.name = i - pkgconf.pkgpath = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs) - pkgconf.run() - - - # the libpaths are set nicely, unfortunately they make really long command-lines - # remove the qtcore ones from qtgui, etc - def process_lib(vars_, coreval): - for d in vars_: - var = d.upper() - if var == 'QTCORE': continue - - value = env['LIBPATH_'+var] - if value: - core = env[coreval] - accu = [] - for lib in value: - if lib in core: continue - accu.append(lib) - env['LIBPATH_'+var] = accu - - process_lib(vars, 'LIBPATH_QTCORE') - process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG') - - # rpath if wanted - if Params.g_options.want_rpath: - def process_rpath(vars_, coreval): - for d in vars_: - var = d.upper() - value = env['LIBPATH_'+var] - if value: - core = env[coreval] - accu = [] - for lib in value: - if var != 'QTCORE': - if lib in core: - continue - accu.append('-Wl,--rpath='+lib) - env['RPATH_'+var] = accu - process_rpath(vars, 'LIBPATH_QTCORE') - process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG') - - env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale' - -def detect(conf): - if sys.platform=='win32': fatal('Qt4.py will not work on win32 for now - ask the author') - detect_qt4(conf) - -def 
set_options(opt): - try: opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]') - except Exception: pass - - opt.add_option('--header-ext', - type='string', - default='', - help='header extension for moc files', - dest='qt_header_ext') - - for i in "qtdir qtincludes qtlibs qtbin".split(): - opt.add_option('--'+i, type='string', default='', dest=i) - - if sys.platform == "darwin": - opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True) - - opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False) - - -extension(EXT_RCC)(create_rcc_task) -extension(EXT_UI)(create_uic_task) -taskgen(apply_qt4) -feature('qt4')(apply_qt4) -after('apply_link')(apply_qt4) -extension(EXT_QT4)(cxx_hook) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/suncc.py a2jmidid-9/wafadmin/Tools/suncc.py --- a2jmidid-8~dfsg0/wafadmin/Tools/suncc.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/suncc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,112 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) -# Ralf Habacker, 2006 (rh) - -import os, optparse -import Utils, Params, Configure -import ccroot, ar -from Configure import conftest - -def find_scc(conf): - v = conf.env - cc = None - if v['CC']: cc = v['CC'] - elif 'CC' in os.environ: cc = os.environ['CC'] - #if not cc: cc = conf.find_program('gcc', var='CC') - if not cc: cc = conf.find_program('cc', var='CC') - if not cc: conf.fatal('suncc was not found') - v['CC'] = cc - - #TODO: Has anyone a better idea to check if this is a sun cc? 
- ret = os.popen("%s -flags" % cc).close() - if ret: - v['CC_NAME'] = 'sun' - conf.check_message('suncc', '', not ret) - return - -def scc_common_flags(conf): - v = conf.env - - # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS _LIBDIRFLAGS _LIBFLAGS - - v['CC_SRC_F'] = '' - v['CC_TGT_F'] = '-c -o ' - v['CPPPATH_ST'] = '-I%s' # template for adding include paths - - # linker - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] - v['CCLNK_SRC_F'] = '' - v['CCLNK_TGT_F'] = '-o ' - - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STATICLIB_ST'] = '-l%s' - v['STATICLIBPATH_ST'] = '-L%s' - v['CCDEFINES_ST'] = '-D%s' - - - v['SHLIB_MARKER'] = '-Bdynamic' - v['STATICLIB_MARKER'] = '-Bstatic' - - # program - v['program_PATTERN'] = '%s' - - # shared library - v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC'] - v['shlib_LINKFLAGS'] = ['-G'] - v['shlib_PATTERN'] = 'lib%s.so' - - # static lib - v['staticlib_LINKFLAGS'] = ['-Bstatic'] - v['staticlib_PATTERN'] = 'lib%s.a' - -def scc_modifier_debug(conf): - v = conf.env - - # compiler debug levels - v['CCFLAGS'] = ['-O'] - if conf.check_flags('-O2'): - v['CCFLAGS_OPTIMIZED'] = ['-O2'] - v['CCFLAGS_RELEASE'] = ['-O2'] - if conf.check_flags('-g -DDEBUG'): - v['CCFLAGS_DEBUG'] = ['-g', '-DDEBUG'] - if conf.check_flags('-g3 -O0 -DDEBUG'): - v['CCFLAGS_ULTRADEBUG'] = ['-g3', '-O0', '-DDEBUG'] - - # see the option below - try: - debug_level = Params.g_options.debug_level.upper() - except AttributeError: - debug_level = ccroot.DEBUG_LEVELS.CUSTOM - v.append_value('CCFLAGS', v['CCFLAGS_'+debug_level]) - -detect = ''' -find_scc -find_cpp -find_ar -scc_common_flags -cc_load_tools -cc_check_features -scc_modifier_debug -cc_add_flags -''' - -def set_options(opt): - try: - opt.add_option('-d', '--debug-level', - action = 'store', - default = ccroot.DEBUG_LEVELS.RELEASE, - help = "Specify the debug level, does nothing if CFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), - choices = ccroot.DEBUG_LEVELS.ALL, - dest = 'debug_level') - except optparse.OptionConflictError: - pass - - -conftest(find_scc) -conftest(scc_common_flags) -conftest(scc_modifier_debug)
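The probe in find_scc above works because Sun's compiler accepts `-flags` while gcc rejects the unknown option with a nonzero exit status. The same heuristic as a self-contained sketch (Python 3; the helper name is invented, and a zero exit status is assumed to indicate Sun cc):

import subprocess

def looks_like_sun_cc(cc='cc'):
    # Sun/Oracle Studio cc prints its option list for `-flags` and exits 0;
    # gcc fails with a nonzero status instead. Heuristic only.
    try:
        return subprocess.call([cc, '-flags'],
                               stdout=subprocess.DEVNULL,
                               stderr=subprocess.DEVNULL) == 0
    except OSError:
        return False

diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/sunc++.py a2jmidid-9/wafadmin/Tools/sunc++.py --- a2jmidid-8~dfsg0/wafadmin/Tools/sunc++.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/sunc++.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,103 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 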
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) -# Ralf Habacker, 2006 (rh) - -import os, optparse -import Utils, Params, Configure -import ccroot, ar -from Configure import conftest - -def find_sxx(conf): - v = conf.env - cc = None - if v['CXX']: cc = v['CXX'] - elif 'CXX' in os.environ: cc = os.environ['CXX'] - #if not cc: cc = conf.find_program('g++', var='CXX') - if not cc: cc = conf.find_program('c++', var='CXX') - if not cc: conf.fatal('sunc++ was not found') - v['CXX'] = cc - v['CXX_NAME'] = 'sun' - -def sxx_common_flags(conf): - v = conf.env - - # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS _LIBDIRFLAGS _LIBFLAGS - - v['CXX_SRC_F'] = '' - v['CXX_TGT_F'] = '-c -o ' - v['CPPPATH_ST'] = '-I%s' # template for adding include paths - - # linker - if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] - v['CXXLNK_SRC_F'] = '' - v['CXXLNK_TGT_F'] = '-o ' - - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STATICLIB_ST'] = '-l%s' - v['STATICLIBPATH_ST'] = '-L%s' - v['CXXDEFINES_ST'] = '-D%s' - - v['SHLIB_MARKER'] = '-Bdynamic' - v['STATICLIB_MARKER'] = '-Bstatic' - - # program - v['program_PATTERN'] = '%s' - - # shared library - v['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC'] - v['shlib_LINKFLAGS'] = ['-G'] - v['shlib_PATTERN'] = 'lib%s.so' - - # static lib - v['staticlib_LINKFLAGS'] = ['-Bstatic'] - v['staticlib_PATTERN'] = 'lib%s.a' - -def sxx_modifier_debug(conf, kind='cpp'): - v = conf.env - v['CXXFLAGS'] = [''] - if conf.check_flags('-O2', kind=kind): - v['CXXFLAGS_OPTIMIZED'] = ['-O2'] - v['CXXFLAGS_RELEASE'] = ['-O2'] - if conf.check_flags('-g -DDEBUG', kind=kind): - v['CXXFLAGS_DEBUG'] = ['-g', '-DDEBUG'] - if conf.check_flags('-g3 -O0 -DDEBUG', kind=kind): - v['CXXFLAGS_ULTRADEBUG'] = ['-g3', '-O0', '-DDEBUG'] - - try: - debug_level = Params.g_options.debug_level.upper() - except AttributeError: - debug_level = ccroot.DEBUG_LEVELS.CUSTOM - v.append_value('CXXFLAGS', v['CXXFLAGS_'+debug_level]) - -detect = ''' -find_sxx -find_cpp -find_ar -sxx_common_flags -cxx_load_tools -cxx_check_features -sxx_modifier_debug -cxx_add_flags -''' - -def set_options(opt): - try: - opt.add_option('-d', '--debug-level', - action = 'store', - default = ccroot.DEBUG_LEVELS.RELEASE, - help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL), - choices = ccroot.DEBUG_LEVELS.ALL, - dest = 'debug_level') - - except optparse.OptionConflictError: - pass - - -conftest(find_sxx) -conftest(sxx_common_flags) -conftest(sxx_modifier_debug) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/tex.py a2jmidid-9/wafadmin/Tools/tex.py --- a2jmidid-8~dfsg0/wafadmin/Tools/tex.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/tex.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,257 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006 (ita) - -"TeX/LaTeX/PDFLaTeX support" - -import os, re -import Utils, Params, TaskGen, Task, Runner, Scan -from Params import error, warning, debug, fatal - -re_tex = re.compile(r'\\(?P<type>include|import|bringin){(?P<file>[^{}]*)}', re.M) -class tex_scanner(Scan.scanner): - def __init__(self): - Scan.scanner.__init__(self) - - def scan(self, task, node): - env = task.env() - - nodes = [] - names = [] - if not node: return (nodes, names) - - fi = open(node.abspath(env), 'r') - code = fi.read() - fi.close() - - curdirnode = task.curdirnode - abs = curdirnode.abspath() - for match in re_tex.finditer(code): - path = match.group('file') - if path: - for k in ['', '.tex', '.ltx']: - # add another loop for the tex include paths? - debug("trying %s%s" % (path, k), 'tex') - try: - os.stat(abs+os.sep+path+k) - except OSError: - continue - found = path+k - node = curdirnode.find_resource(found) - if node: - nodes.append(node) - else: - debug('could not find %s' % path, 'tex') - names.append(path) - - debug("found the following : %s and names %s" % (nodes, names), 'tex') - return (nodes, names) - -g_tex_scanner = tex_scanner() - -g_bibtex_re = re.compile('bibdata', re.M) -def tex_build(task, command='LATEX'): - env = task.env() - - if env['PROMPT_LATEX']: - Runner.set_exec('noredir') - com = '%s %s' % (env[command], env.get_flat(command+'FLAGS')) - else: - com = '%s %s %s' % (env[command], env.get_flat(command+'FLAGS'), '-interaction=batchmode') - - node = task.m_inputs[0] - reldir = node.bld_dir(env) - srcfile = node.srcpath(env) - - lst = [] - for c in Utils.split_path(reldir): - if c: lst.append('..') - sr = os.path.join(*(lst + [srcfile])) - sr2 = os.path.join(*(lst + [node.m_parent.srcpath(env)])) - - aux_node = node.change_ext('.aux') - idx_node = node.change_ext('.idx') - - hash = '' - old_hash = '' - - nm = aux_node.m_name - docuname = nm[ : len(nm) - 4 ] # 4 is the length of ".aux" - - latex_compile_cmd = 'cd %s && TEXINPUTS=%s:$TEXINPUTS %s %s' % (reldir, sr2, com, sr) - warning('first pass on %s' % command) - ret = Runner.exec_command(latex_compile_cmd) - if ret: return ret - - # look in the .aux file if there is a bibfile to process - try: - file = open(aux_node.abspath(env), 'r') - ct = file.read() - file.close() - except (OSError, IOError): - error('error during bibtex scan') - else: - fo = g_bibtex_re.findall(ct) - - # yes, there is a .aux file to process - if fo: - bibtex_compile_cmd = 'cd %s && BIBINPUTS=%s:$BIBINPUTS %s %s' % (reldir, sr2, env['BIBTEX'], docuname) - - warning('calling bibtex') - ret = Runner.exec_command(bibtex_compile_cmd) - if ret: - error('error when calling bibtex %s' % bibtex_compile_cmd) - return ret - - # look on the filesystem if there is a .idx file to process - try: - idx_path = idx_node.abspath(env) - os.stat(idx_path) - except OSError: - error('error during file.idx scan') - else: - makeindex_compile_cmd = 'cd %s && %s %s' % (reldir, env['MAKEINDEX'], idx_path) - warning('calling makeindex') - ret = Runner.exec_command(makeindex_compile_cmd) - if ret: - error('error when calling makeindex %s' % makeindex_compile_cmd) - return ret - - i = 0 - while i < 10: - # guard against infinite loops - one never knows - i += 1 - - # watch the contents of file.aux - old_hash = hash - try: - hash = Params.h_file(aux_node.abspath(env)) - except KeyError: - error('could not read the aux file -> %s' % aux_node.abspath(env)) - pass - - # debug - #print "hash is, ", hash, " ", old_hash - - # stop if file.aux does not change anymore - if hash 
and hash == old_hash: break - - # run the command - warning('calling %s' % command) - ret = Runner.exec_command(latex_compile_cmd) - if ret: - error('error when calling %s %s' % (command, latex_compile_cmd)) - return ret - - # 0 means no error - return 0 - -latex_vardeps = ['LATEX', 'LATEXFLAGS'] -def latex_build(task): - return tex_build(task, 'LATEX') - -pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS'] -def pdflatex_build(task): - return tex_build(task, 'PDFLATEX') - -g_texobjs = ['latex','pdflatex'] -class tex_taskgen(TaskGen.task_gen): - s_default_ext = ['.tex', '.ltx'] - def __init__(self, *k, **kw): - TaskGen.task_gen.__init__(self, *k) - - global g_texobjs - self.m_type = kw['type'] - if not self.m_type in g_texobjs: - fatal('type %s not supported for texobj' % self.m_type) - self.outs = '' # example: "ps pdf" - self.prompt = 1 # prompt for incomplete files (else the batchmode is used) - self.deps = '' - def apply(self): - - tree = Params.g_build - outs = self.outs.split() - self.env['PROMPT_LATEX'] = self.prompt - - deps_lst = [] - - if self.deps: - deps = self.to_list(self.deps) - for filename in deps: - n = self.path.find_resource(filename) - if not n in deps_lst: deps_lst.append(n) - - for filename in self.source.split(): - base, ext = os.path.splitext(filename) - if not ext in self.s_default_ext: continue - - node = self.path.find_resource(filename) - if not node: fatal('cannot find %s' % filename) - - if self.m_type == 'latex': - task = self.create_task('latex', self.env) - task.set_inputs(node) - task.set_outputs(node.change_ext('.dvi')) - elif self.m_type == 'pdflatex': - task = self.create_task('pdflatex', self.env) - task.set_inputs(node) - task.set_outputs(node.change_ext('.pdf')) - else: - fatal('no type or invalid type given in tex object (should be latex or pdflatex)') - - task.m_scanner = g_tex_scanner - task.m_env = self.env - task.curdirnode = self.path - - # add the manual dependencies - if deps_lst: - variant = node.variant(self.env) - try: - lst = tree.node_deps[variant][node.id] - for n in deps_lst: - if not n in lst: - lst.append(n) - except KeyError: - tree.node_deps[variant][node.id] = deps_lst - - if self.m_type == 'latex': - if 'ps' in outs: - pstask = self.create_task('dvips', self.env) - pstask.set_inputs(task.m_outputs) - pstask.set_outputs(node.change_ext('.ps')) - if 'pdf' in outs: - pdftask = self.create_task('dvipdf', self.env) - pdftask.set_inputs(task.m_outputs) - pdftask.set_outputs(node.change_ext('.pdf')) - elif self.m_type == 'pdflatex': - if 'ps' in outs: - pstask = self.create_task('pdf2ps', self.env) - pstask.set_inputs(task.m_outputs) - pstask.set_outputs(node.change_ext('.ps')) - -def detect(conf): - v = conf.env - for p in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex'.split(): - conf.find_program(p, var=p.upper()) - v[p.upper()+'FLAGS'] = '' - v['DVIPSFLAGS'] = '-Ppdf' - -b = Task.simple_task_type -b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', prio=60) -b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', prio=60) -b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', prio=60) -b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', prio=60) -b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', prio=60) - -a = b('latex', '${TEX} ${TEXFLAGS} ${SRC}', prio=40) -a.m_vars = latex_vardeps -a.run = latex_build - -a = b('pdflatex', '${TEX} ${TEXFLAGS} ${SRC}', prio=40) -a.m_vars = pdflatex_vardeps -a.run = pdflatex_build - -
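The dependency scanner in tex.py above is driven entirely by the re_tex pattern; a quick illustration of what scan() extracts from a source file (Python 3; the sample input is made up):

import re

re_tex = re.compile(r'\\(?P<type>include|import|bringin){(?P<file>[^{}]*)}', re.M)

sample = r'\include{chapters/intro} \import{src/}{util}'
for m in re_tex.finditer(sample):
    print(m.group('type'), '->', m.group('file'))
# prints: include -> chapters/intro
#         import -> src/

diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/vala.py 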
a2jmidid-9/wafadmin/Tools/vala.py --- a2jmidid-8~dfsg0/wafadmin/Tools/vala.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/vala.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,174 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Ali Sabil, 2007 - -import os.path, shutil -import Task, Runner, Utils, Params -from TaskGen import extension - -from pproc import Popen, PIPE - -EXT_VALA = ['.vala', '.gs'] - -class valac(Task.Task): - def __init__(self, *args, **kwargs): - Task.Task.__init__(self, *args, **kwargs) - self.prio = 80 - - def get_str(self): - "string to display to the user" - src_str = " ".join([a.m_name for a in self.m_inputs]) - return "%s: %s\n" % (self.__class__.__name__, src_str) - - def run(self): - task = self # TODO cleanup - env = task.env() - inputs = [a.srcpath(env) for a in task.m_inputs] - valac = env['VALAC'] - vala_flags = env.get_flat('VALAFLAGS') - top_src = Params.g_build.m_srcnode.abspath() - top_bld = Params.g_build.m_srcnode.abspath(env) - - if env['VALAC_VERSION'] > (0, 1, 6): - cmd = [valac, '-C', '--quiet', vala_flags] - else: - cmd = [valac, '-C', vala_flags] - - if task.threading: - cmd.append('--thread') - - if task.output_type in ('shlib', 'staticlib'): - cmd.append('--library ' + task.target) - cmd.append('--basedir ' + top_src) - cmd.append('-d ' + top_bld) - #cmd.append('-d %s' % Params.g_build.m_srcnode.abspath(bld.env())) - #cmd.append('-d %s' % Params.g_build.m_bldnode.bldpath(env)) - else: - output_dir = task.m_outputs[0].bld_dir(env) - cmd.append('-d %s' % output_dir) - - for vapi_dir in task.vapi_dirs: - cmd.append('--vapidir=%s' % vapi_dir) - - for package in task.packages: - cmd.append('--pkg %s' % package) - - cmd.append(" ".join(inputs)) - result = Runner.exec_command(" ".join(cmd)) - - if task.output_type in ('shlib', 'staticlib'): - # generate the .deps file - if task.packages: - filename = os.path.join(task.m_outputs[0].bld_dir(env), "%s.deps" % task.target) - deps = open(filename, 'w') - for package in task.packages: - deps.write(package + '\n') - deps.close() - - # handle vala 0.1.6, which doesn't honor --directory for the generated .vapi - try: - src_vapi = os.path.join(top_bld, "..", "%s.vapi" % task.target) - dst_vapi = task.m_outputs[0].bld_dir(env) - shutil.move(src_vapi, dst_vapi) - except IOError: - pass - # handle vala >= 0.1.7, which has a weird definition of --directory - try: - src_vapi = os.path.join(top_bld, "%s.vapi" % task.target) - dst_vapi = task.m_outputs[0].bld_dir(env) - shutil.move(src_vapi, dst_vapi) - except IOError: - pass - - # handle vala >= 0.2.0, which doesn't honor --directory for the generated .gidl - try: - src_gidl = os.path.join(top_bld, "%s.gidl" % task.target) - dst_gidl = task.m_outputs[0].bld_dir(env) - shutil.move(src_gidl, dst_gidl) - except IOError: - pass - return result -Task.g_task_types["valac"] = valac -
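Flattened out, the command line that valac.run() above assembles looks roughly like the following (an illustrative sketch only: the exact flags depend on VALAC_VERSION and the task's settings, and the target, directories and package names here are invented):

# rough shape of the valac invocation for a >= 0.1.7 compiler building a library
cmd = ['valac', '-C', '--quiet',
       '--thread',               # only if task.threading is set
       '--library mylib',        # only for shlib/staticlib outputs
       '--basedir /path/to/src',
       '-d /path/to/build',
       '--vapidir=/path/to/vapi',
       '--pkg gthread-2.0',
       'main.vala']
print(' '.join(cmd))

-def vala_file(self, node): - valatask = getattr(self, "valatask", None) - # there is only one vala task and it compiles all vala files .. 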
:-/ - if not valatask: - valatask = self.create_task('valac') - self.valatask = valatask - valatask.output_type = self.m_type - valatask.packages = [] - valatask.vapi_dirs = [] - valatask.target = self.target - valatask.threading = False - - if hasattr(self, 'packages'): - valatask.packages = Utils.to_list(self.packages) - - if hasattr(self, 'vapi_dirs'): - vapi_dirs = Utils.to_list(self.vapi_dirs) - for vapi_dir in vapi_dirs: - try: - valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath()) - valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env)) - except AttributeError: - Params.warning("Unable to locate Vala API directory: '%s'" % vapi_dir) - - if hasattr(self, 'threading'): - valatask.threading = self.threading - self.uselib = self.to_list(self.uselib) - if not 'GTHREAD' in self.uselib: - self.uselib.append('GTHREAD') - - env = valatask.env() - - output_nodes = [] - output_nodes.append(node.change_ext('.c')) - output_nodes.append(node.change_ext('.h')) - if self.m_type != 'program': - output_nodes.append(self.path.find_build('%s.vapi' % self.target)) - if env['VALAC_VERSION'] > (0, 1, 7): - output_nodes.append(self.path.find_build('%s.gidl' % self.target)) - if valatask.packages: - output_nodes.append(self.path.find_build('%s.deps' % self.target)) - - valatask.m_inputs.append(node) - valatask.m_outputs.extend(output_nodes) - self.allnodes.append(node.change_ext('.c')) - -def detect(conf): - min_version = (0, 1, 6) - min_version_str = "%d.%d.%d" % min_version - - valac = conf.find_program('valac', var='VALAC') - if not valac: - conf.fatal("valac not found") - return - - if not conf.env["HAVE_GTHREAD"]: - conf.check_pkg('gthread-2.0', destvar='GTHREAD', mandatory=False) - - try: - output = Popen([valac, "--version"], stdout=PIPE).communicate()[0] - version = output.split(' ', 1)[-1].strip().split(".") - version = [int(atom) for atom in version] - valac_version = tuple(version) - except Exception: - valac_version = (0, 0, 0) - - conf.check_message('program version', - 'valac >= ' + min_version_str, - valac_version >= min_version, - "%d.%d.%d" % valac_version) - - if valac_version < min_version: - conf.fatal("valac version too old to be used with this tool") - return - - conf.env['VALAC_VERSION'] = valac_version - conf.env['VALAFLAGS'] = '' - -extension(EXT_VALA)(vala_file) diff -Nru a2jmidid-8~dfsg0/wafadmin/Tools/winres.py a2jmidid-9/wafadmin/Tools/winres.py --- a2jmidid-8~dfsg0/wafadmin/Tools/winres.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Tools/winres.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! 
/usr/bin/env python -# encoding: utf-8 -# Brant Young, 2007 - -"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}" - -import os, sys -import TaskGen, Task -from Utils import quote_whitespace -from TaskGen import extension - -EXT_WINRC = ['.rc'] - -winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CXXDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CXXINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F}${TGT} ${WINRC_SRC_F}${SRC}' - -def rc_file(self, node): - obj_ext = '.rc.o' - if self.env['WINRC_TGT_F'] == '/fo ': obj_ext = '.res' - - rctask = self.create_task('winrc') - rctask.set_inputs(node) - rctask.set_outputs(node.change_ext(obj_ext)) - - # make sure the linker can find the compiled resource files - self.compiled_tasks.append(rctask) - -# create our action, for use with .rc files - Task.simple_task_type('winrc', winrc_str, color='BLUE', prio=40) - -def detect(conf): - v = conf.env - - cc = os.path.basename(''.join(v['CC']).lower()) - cxx = os.path.basename(''.join(v['CXX']).lower()) - - # find rc.exe - if cc in ['gcc', 'cc', 'g++', 'c++']: - winrc = conf.find_program('windres', var='WINRC') - v['WINRC_TGT_F'] = '-o ' - v['WINRC_SRC_F'] = '-i ' - elif cc == 'cl.exe' or cxx == 'cl.exe': - winrc = conf.find_program('RC', var='WINRC') - v['WINRC_TGT_F'] = '/fo ' - v['WINRC_SRC_F'] = ' ' - else: - return 0 - - if not winrc: - conf.fatal('winrc was not found!!') - else: - v['WINRC'] = quote_whitespace(winrc) - - v['WINRCFLAGS'] = '' - - -extension(EXT_WINRC)(rc_file)
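With the MinGW branch of detect() above (WINRC_SRC_F = '-i ', WINRC_TGT_F = '-o '), the winrc rule reduces to a plain windres call; invoked directly it would look like this (illustrative Python 3 sketch, file names invented):

import subprocess

# GNU windres flavor: -i <input .rc>, -o <output object>,
# matching the WINRC_SRC_F/WINRC_TGT_F templates above
subprocess.check_call(['windres', '-i', 'app.rc', '-o', 'app.rc.o'])

diff -Nru a2jmidid-8~dfsg0/wafadmin/UnitTest.py a2jmidid-9/wafadmin/UnitTest.py --- a2jmidid-8~dfsg0/wafadmin/UnitTest.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/UnitTest.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,185 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -#! /usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2006 - -""" -Unit tests run in the shutdown() method, and for c/c++ programs - -One should NOT have to give parameters to programs to execute - -In the shutdown method, add the following code: - - >>> def shutdown(): - ... ut = UnitTest.unit_test() - ... ut.run() - ... ut.print_results() - - -Each object to use as a unit test must be a program and must have X{obj.unit_test=1} -""" -import os, sys -import Params, TaskGen, Utils -import pproc as subprocess - -class unit_test(object): - "Unit test representation" - def __init__(self): - self.returncode_ok = 0 # Unit test returncode considered OK. All returncodes differing from this one - # will cause the unit test to be marked as "FAILED". - - # The following variables are filled with data by run(). - - # print_results() uses these for printing the unit test summary, - # but if there is need for direct access to the results, - # they can be retrieved here, after calling run(). - - self.num_tests_ok = 0 # Number of successful unit tests - self.num_tests_failed = 0 # Number of failed unit tests - self.num_tests_err = 0 # Tests that have not even run - self.total_num_tests = 0 # Total amount of unit tests - self.max_label_length = 0 # Maximum label length (pretty-print the output) - - self.unit_tests = {} # Unit test dictionary. Key: the label (unit test filename relative - # to the build dir), value: unit test filename with absolute path - self.unit_test_results = {} # Dictionary containing the unit test results. - # Key: the label, value: result (true = success false = failure) - self.unit_test_erroneous = {} # Dictionary indicating erroneous unit tests. 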
- # Key: the label, value: true = unit test has an error false = unit test is ok - self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir - self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites) - self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites) - self.run_if_waf_does = 'check' #build was the old default - - def run(self): - "Run the unit tests and gather results (note: no output here)" - - self.num_tests_ok = 0 - self.num_tests_failed = 0 - self.num_tests_err = 0 - self.total_num_tests = 0 - self.max_label_length = 0 - - self.unit_tests = {} - self.unit_test_results = {} - self.unit_test_erroneous = {} - - # If waf is not building, don't run anything - if not Params.g_commands[self.run_if_waf_does]: return - - # Gather unit tests to call - for obj in TaskGen.g_allobjs: - if not hasattr(obj,'unit_test'): continue - unit_test = getattr(obj,'unit_test') - if not unit_test: continue - try: - if obj.m_type == 'program': - output = obj.path - filename = os.path.join(output.abspath(obj.env), obj.target) - srcdir = output.abspath() - label = os.path.join(output.bldpath(obj.env), obj.target) - self.max_label_length = max(self.max_label_length, len(label)) - self.unit_tests[label] = (filename, srcdir) - except KeyError: - pass - self.total_num_tests = len(self.unit_tests) - # Now run the unit tests - col1=Params.g_colors['GREEN'] - col2=Params.g_colors['NORMAL'] - Params.pprint('GREEN', 'Running the unit tests') - count = 0 - result = 1 - - curdir = os.getcwd() # store the current dir (only if self.change_to_testfile_dir) - for label, file_and_src in self.unit_tests.iteritems(): - filename = file_and_src[0] - srcdir = file_and_src[1] - count += 1 - line = Utils.progress_line(count, self.total_num_tests, col1, col2) - if Params.g_options.progress_bar and line: - sys.stdout.write(line) - sys.stdout.flush() - try: - if self.change_to_testfile_dir: - os.chdir(srcdir) - - kwargs = dict() - if not self.want_to_see_test_output: - kwargs['stdout'] = subprocess.PIPE # PIPE for ignoring output - if not self.want_to_see_test_error: - kwargs['stderr'] = subprocess.PIPE # PIPE for ignoring output - pp = subprocess.Popen(filename, **kwargs) - pp.wait() - - if self.change_to_testfile_dir: - os.chdir(curdir) - - result = int(pp.returncode == self.returncode_ok) - - if result: - self.num_tests_ok += 1 - else: - self.num_tests_failed += 1 - - self.unit_test_results[label] = result - self.unit_test_erroneous[label] = 0 - except OSError: - self.unit_test_erroneous[label] = 1 - self.num_tests_err += 1 - except KeyboardInterrupt: - pass - if Params.g_options.progress_bar: sys.stdout.write(Params.g_cursor_on) - - def print_results(self): - "Pretty-prints a summary of all unit tests, along with some statistics" - - # If waf is not building, don't output anything - if not Params.g_commands[self.run_if_waf_does]: return - - p = Params.pprint - # Early quit if no tests were performed - if self.total_num_tests == 0: - p('YELLOW', 'No unit tests present') - return - p('GREEN', 'Running unit tests') - print - - for label, filename in self.unit_tests.iteritems(): - err = 0 - result = 0 - - try: err = self.unit_test_erroneous[label] - except KeyError: pass - - try: result = self.unit_test_results[label] - except KeyError: pass - - n = self.max_label_length - len(label) - if err: n += 4 - elif result: n += 7 - else: n += 3 - - line = '%s %s' % (label, '.' 
* n) - - print line, - if err: p('RED', 'ERROR') - elif result: p('GREEN', 'OK') - else: p('YELLOW', 'FAILED') - - percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0 - percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0 - percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0 - - print ''' -Successful tests: %i (%.1f%%) -Failed tests: %i (%.1f%%) -Erroneous tests: %i (%.1f%%) - -Total number of tests: %i -''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed, - self.num_tests_err, percentage_erroneous, self.total_num_tests) - p('GREEN', 'Unit tests finished') - - diff -Nru a2jmidid-8~dfsg0/wafadmin/Utils.py a2jmidid-9/wafadmin/Utils.py --- a2jmidid-8~dfsg0/wafadmin/Utils.py 2008-07-21 22:39:47.000000000 +0000 +++ a2jmidid-9/wafadmin/Utils.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,247 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -import sys -if sys.hexversion < 0x020400f0: from sets import Set as set -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005 (ita) - -"Utility functions" - -import os, sys, imp, types, string, time, errno -from UserDict import UserDict -import Params -from Constants import * - -try: - from fnv import new as md5 -except ImportError: - try: - import hashlib - md5 = hashlib.md5 - except ImportError: - import md5 - md5 = md5.md5 - -g_trace = 0 -g_debug = 0 -g_error = 0 - -g_ind_idx = 0 -g_ind = ['\\', '|', '/', '-'] -"the rotation thing" - -def test_full(): - try: - f=open('.waf-full','w') - f.write('test') - f.close() - os.unlink('.waf-full') - except IOError, e: - import errno - if e.errno == errno.ENOSPC: - Params.fatal('filesystem full', e.errno) - else: - Params.fatal(str(e), e.errno) - -class ordered_dict(UserDict): - def __init__(self, dict = None): - self.allkeys = [] - UserDict.__init__(self, dict) - - def __delitem__(self, key): - self.allkeys.remove(key) - UserDict.__delitem__(self, key) - - def __setitem__(self, key, item): - if key not in self.allkeys: self.allkeys.append(key) - UserDict.__setitem__(self, key, item) - -listdir = os.listdir -if sys.platform == "win32": - def listdir_win32(s): - if not os.path.isdir(s): - e = OSError() - e.errno = errno.ENOENT - raise e - return os.listdir(s) - listdir = listdir_win32 - -def to_int(s): - lst = s.split(".") - -def waf_version(mini = 0x010000, maxi = 0x100000): - "throws an exception if the waf version is wrong" - ver = HEXVERSION - try: min_val = mini + 0 - except TypeError: min_val = int(mini.replace('.', '0'), 16) - - if min_val > ver: - Params.fatal("waf version should be at least %s (%x found)" % (mini, ver)) - - try: max_val = maxi + 0 - except TypeError: max_val = int(maxi.replace('.', '0'), 16) - - if max_val < ver: - Params.fatal("waf version should be at most %s (%x found)" % (maxi, ver)) - -def python_24_guard(): - if sys.hexversion<0x20400f0: - raise ImportError,"Waf requires Python >= 2.3 but the raw source requires Python 2.4" - -def reset(): - import Params, TaskGen, Environment - Params.g_build = None - TaskGen.g_allobjs = [] - Environment.g_cache_max = {} - -def to_list(sth): - if type(sth) is types.ListType: - return sth - else: - return sth.split() - -g_loaded_modules = {} -"index modules by absolute path" - -g_module=None -"the main module is special" - -def load_module(file_path, name=WSCRIPT_FILE): - "this function requires an absolute path" - try: - return g_loaded_modules[file_path] - except KeyError: - pass - - module = imp.new_module(name) - - 
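The pattern used by load_module here - create an empty module object, then exec the script's text inside its namespace - is easier to see in isolation. A minimal Python 3 equivalent (types.ModuleType stands in for the imp.new_module call above; the file name is invented):

import types

module = types.ModuleType('wscript_main')
with open('wscript') as fh:
    # run the script with the module's __dict__ as its global namespace
    exec(compile(fh.read(), 'wscript', 'exec'), module.__dict__)
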
try: - file = open(file_path, 'r') - except (IOError, OSError): - Params.fatal('The file %s could not be opened!' % file_path) - - d = module.__dict__ - import Common - d['install_files'] = Common.install_files - d['install_as'] = Common.install_as - d['symlink_as'] = Common.symlink_as - - module_dir = os.path.dirname(file_path) - sys.path.insert(0, module_dir) - exec file in module.__dict__ - sys.path.remove(module_dir) - if file: file.close() - - g_loaded_modules[file_path] = module - - return module - -def set_main_module(file_path): - "Load custom options, if defined" - global g_module - g_module = load_module(file_path, 'wscript_main') - - # remark: to register the module globally, use the following: - # sys.modules['wscript_main'] = g_module - -def to_hashtable(s): - "used for importing env files" - tbl = {} - lst = s.split('\n') - for line in lst: - if not line: continue - mems = line.split('=') - tbl[mems[0]] = mems[1] - return tbl - -def get_term_cols(): - "console width" - return 80 -try: - import struct, fcntl, termios -except ImportError: - pass -else: - if sys.stdout.isatty(): - def myfun(): - dummy_lines, cols = struct.unpack("HHHH", \ - fcntl.ioctl(sys.stdout.fileno(),termios.TIOCGWINSZ , \ - struct.pack("HHHH", 0, 0, 0, 0)))[:2] - return cols - # we actually try the function once to see if it is suitable - try: - myfun() - except IOError: - pass - else: - get_term_cols = myfun - -def progress_line(state, total, col1, col2): - n = len(str(total)) - - global g_ind, g_ind_idx - g_ind_idx += 1 - ind = g_ind[g_ind_idx % 4] - - if hasattr(Params.g_build, 'ini'): - ini = Params.g_build.ini - else: - ini = Params.g_build.ini = time.time() - - pc = (100.*state)/total - eta = time.strftime('%H:%M:%S', time.gmtime(time.time() - ini)) - fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind) - left = fs % (state, total, col1, pc, col2) - right = '][%s%s%s]' % (col1, eta, col2) - - cols = get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2) - if cols < 7: cols = 7 - - ratio = int((cols*state)/total) - 1 - - bar = ('='*ratio+'>').ljust(cols) - msg = Params.g_progress % (left, bar, right) - - return msg - -def split_path(path): - if not path: return [''] - return path.split('/') - -if sys.platform == 'win32': - def split_path(path): - h,t = os.path.splitunc(path) - if not h: return __split_dirs(t) - return [h] + __split_dirs(t)[1:] - - def __split_dirs(path): - h,t = os.path.split(path) - if not h: return [t] - if h == path: return [h.replace('\\', '')] - if not t: return __split_dirs(h) - else: return __split_dirs(h) + [t] - -_quote_define_name_translation = None -"lazily construct a translation table for mapping invalid characters to valid ones" - -def quote_define_name(path): - "Converts a string to a constant name, foo/zbr-xpto.h -> FOO_ZBR_XPTO_H" - global _quote_define_name_translation - if _quote_define_name_translation is None: - invalid_chars = [chr(x) for x in xrange(256)] - for valid in string.digits + string.uppercase: invalid_chars.remove(valid) - _quote_define_name_translation = string.maketrans(''.join(invalid_chars), '_'*len(invalid_chars)) - - return string.translate(string.upper(path), _quote_define_name_translation) - -def quote_whitespace(path): - return (path.strip().find(' ') > 0 and '"%s"' % path or path).replace('""', '"') - -def trimquotes(s): - if not s: return '' - s = s.rstrip() - if s[0] == "'" and s[-1] == "'": return s[1:-1] - return s - - diff -Nru a2jmidid-8~dfsg0/wscript a2jmidid-9/wscript --- a2jmidid-8~dfsg0/wscript 2012-07-05 
01:14:23.000000000 +0000 +++ a2jmidid-9/wscript 1970-01-01 00:00:00.000000000 +0000 @@ -1,198 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -import os -from Configure import g_maxlen -import Params -import time -import Task -import re - -APPNAME='a2jmidid' -VERSION='8' - -# these variables are mandatory ('/' are converted automatically) -srcdir = '.' -blddir = 'build' - -def create_gitversion_gen(bld, header='gitversion.h', define=None): - cmd = '../gitversion_regenerate.sh ${TGT}' - if define: - cmd += " " + define - cls = Task.simple_task_type('gitversion', cmd, color='BLUE') - cls.must_run = lambda self: True - #cls.before = 'cxx' - - def sg(self): - rt = Params.h_file(self.m_outputs[0].abspath(self.env())) - return rt - cls.signature = sg - - #def se(self): - # r = sg(self) - # return (r, r, r, r, r) - #cls.cache_sig = property(sg, None) - cls.cache_sig = None - - tsk = cls('gitversion', bld.env().copy()) - tsk.m_inputs = [] - tsk.m_outputs = [bld.path.find_or_declare(header)] - tsk.prio = 1 # execute this task first - -def display_msg(msg, status = None, color = None): - sr = msg - global g_maxlen - g_maxlen = max(g_maxlen, len(msg)) - if status: - print "%s :" % msg.ljust(g_maxlen), - Params.pprint(color, status) - else: - print "%s" % msg.ljust(g_maxlen) - -def set_options(opt): - opt.tool_options('compiler_cc') - opt.add_option('--enable-pkg-config-dbus-service-dir', action='store_true', default=False, help='force the D-Bus service install dir to be the one returned by pkg-config') - opt.add_option('--disable-dbus', action='store_true', default=False, help="Don't enable D-Bus support even if required dependencies are present") - opt.add_option('--mandir', type='string', help="Manpage directory [Default: PREFIX/share/man]") - -def configure(conf): - conf.check_tool('compiler_cc') - - conf.check_pkg('alsa', mandatory=True) - conf.check_pkg('jack', vnum="0.109.0", mandatory=True) - if not Params.g_options.disable_dbus: - conf.check_pkg('dbus-1', mandatory=False, pkgvars=['session_bus_services_dir']) - conf.env['DBUS_ENABLED'] = 'LIB_DBUS-1' in conf.env - else: - conf.env['DBUS_ENABLED'] = False - - conf.env['LIB_DL'] = ['dl'] - - #conf.check_header('expat.h', mandatory=True) - #conf.env['LIB_EXPAT'] = ['expat'] - conf.check_header('getopt.h', mandatory=True) - - if conf.env['DBUS_ENABLED']: - if Params.g_options.enable_pkg_config_dbus_service_dir: - conf.env['DBUS_SERVICES_DIR'] = conf.env['DBUS-1_SESSION_BUS_SERVICES_DIR'][0] - else: - conf.env['DBUS_SERVICES_DIR'] = os.path.normpath(conf.env['PREFIX'] + '/share/dbus-1/services') - - conf.check_tool('misc') # dbus service file subst tool - - if Params.g_options.mandir: - conf.env['MANDIR'] = Params.g_options.mandir - else: - conf.env['MANDIR'] = conf.env['PREFIX'] + '/share/man' - - conf.define('A2J_VERSION', VERSION) - conf.write_config_header('config.h') - - gitrev = None - if os.access('gitversion.h', os.R_OK): - data = file('gitversion.h').read() - m = re.match(r'^#define GIT_VERSION "([^"]*)"$', data) - if m != None: - gitrev = m.group(1) - - print - display_msg("==================") - version_msg = "a2jmidid-" + VERSION - if gitrev: - version_msg += " exported from " + gitrev - else: - version_msg += " git revision will be checked and updated if needed during build" - print version_msg - print - - display_msg("Install prefix", conf.env['PREFIX'], 'CYAN') - if conf.env['DBUS_ENABLED']: - have_dbus_status = "yes" - else: - have_dbus_status = "no" - display_msg("D-Bus support", have_dbus_status) - if conf.env['DBUS_ENABLED']: - 
display_msg('D-Bus service install directory', conf.env['DBUS_SERVICES_DIR'], 'CYAN') - if conf.env['DBUS_SERVICES_DIR'] != conf.env['DBUS-1_SESSION_BUS_SERVICES_DIR'][0]: - print - print Params.g_colors['RED'] + "WARNING: D-Bus session services directory as reported by pkg-config is" - print Params.g_colors['RED'] + "WARNING:", - print Params.g_colors['CYAN'] + conf.env['DBUS-1_SESSION_BUS_SERVICES_DIR'][0] - print Params.g_colors['RED'] + 'WARNING: but service file will be installed in' - print Params.g_colors['RED'] + "WARNING:", - print Params.g_colors['CYAN'] + conf.env['DBUS_SERVICES_DIR'] - print Params.g_colors['RED'] + 'WARNING: You may need to adjust your D-Bus configuration after installing' - print 'WARNING: You can override the D-Bus service install directory' - print 'WARNING: with the --enable-pkg-config-dbus-service-dir option to this script' - print Params.g_colors['NORMAL'], - print - -def build(bld): - if not os.access('gitversion.h', os.R_OK): - create_gitversion_gen(bld) - - prog = bld.create_obj('cc', 'program') - prog.source = [ - 'a2jmidid.c', - 'log.c', - 'port.c', - 'port_thread.c', - 'port_hash.c', - 'paths.c', - #'conf.c', - 'jack.c', - 'list.c', - ] - - if bld.env()['DBUS_ENABLED']: - prog.source.append('dbus.c') - prog.source.append('dbus_iface_introspectable.c') - prog.source.append('dbus_iface_control.c') - prog.source.append('sigsegv.c') - - prog.includes = '.' # make waf dependency tracking work - prog.target = 'a2jmidid' - prog.uselib = 'ALSA JACK DL' - if bld.env()['DBUS_ENABLED']: - prog.uselib += " DBUS-1" - prog = bld.create_obj('cc', 'program') - prog.source = 'a2jmidi_bridge.c' - prog.target = 'a2jmidi_bridge' - prog.uselib = 'ALSA JACK' - - prog = bld.create_obj('cc', 'program') - prog.source = 'j2amidi_bridge.c' - prog.target = 'j2amidi_bridge' - prog.uselib = 'ALSA JACK' - - if bld.env()['DBUS_ENABLED']: - # process org.gna.home.a2jmidid.service.in -> org.gna.home.a2jmidid.service - obj = bld.create_obj('subst') - obj.source = 'org.gna.home.a2jmidid.service.in' - obj.target = 'org.gna.home.a2jmidid.service' - obj.dict = {'BINDIR': bld.env()['PREFIX'] + '/bin'} - obj.inst_var = bld.env()['DBUS_SERVICES_DIR'] - obj.inst_dir = '/' - - install_files('PREFIX', 'bin', 'a2j_control', chmod=0755) - install_files('PREFIX', 'bin', 'a2j', chmod=0755) - - # install man pages - man_pages = [ - "a2jmidi_bridge.1", - "a2jmidid.1", - "j2amidi_bridge.1", - ] - - if bld.env()['DBUS_ENABLED']: - man_pages.append("a2j.1") - man_pages.append("a2j_control.1") - - for i in range(len(man_pages)): - man_pages[i] = "man/" + man_pages[i] - - install_files('MANDIR', 'man1', man_pages) - -def dist_hook(): - os.remove('gitversion_regenerate.sh') - os.system('../gitversion_regenerate.sh gitversion.h')
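The session_bus_services_dir variable that configure() reads through check_pkg comes straight from dbus-1.pc; outside waf the same lookup can be done with pkg-config directly (a Python 3 sketch, assuming pkg-config and the dbus-1 development files are installed):

import subprocess

services_dir = subprocess.check_output(
    ['pkg-config', '--variable=session_bus_services_dir', 'dbus-1'],
    text=True).strip()
print(services_dir)  # typically /usr/share/dbus-1/services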