author     Lorry <lorry@roadtrain.codethink.co.uk>  2012-08-22 14:29:52 +0100
committer  Lorry <lorry@roadtrain.codethink.co.uk>  2012-08-22 14:29:52 +0100
commit     f1bdf13786f0752c0846cf36f0d91e4fc6747929 (patch)
tree       4223b2035bf2240d681a53822808b3c7f687b905 /build
download   subversion-tarball-f1bdf13786f0752c0846cf36f0d91e4fc6747929.tar.gz
Tarball conversion
Diffstat (limited to 'build')
-rwxr-xr-x  build/PrintPath  136
-rw-r--r--  build/ac-macros/apache.m4  178
-rw-r--r--  build/ac-macros/apr.m4  151
-rw-r--r--  build/ac-macros/apr_memcache.m4  98
-rw-r--r--  build/ac-macros/aprutil.m4  146
-rw-r--r--  build/ac-macros/berkeley-db.m4  246
-rw-r--r--  build/ac-macros/ctypesgen.m4  78
-rw-r--r--  build/ac-macros/find_apr.m4  168
-rw-r--r--  build/ac-macros/find_apu.m4  178
-rw-r--r--  build/ac-macros/gssapi.m4  82
-rw-r--r--  build/ac-macros/java.m4  215
-rw-r--r--  build/ac-macros/kwallet.m4  117
-rw-r--r--  build/ac-macros/neon.m4  168
-rw-r--r--  build/ac-macros/sasl.m4  102
-rw-r--r--  build/ac-macros/serf.m4  99
-rw-r--r--  build/ac-macros/sqlite.m4  249
-rw-r--r--  build/ac-macros/svn-macros.m4  204
-rw-r--r--  build/ac-macros/swig.m4  297
-rw-r--r--  build/ac-macros/zlib.m4  74
-rwxr-xr-x  build/buildcheck.sh  161
-rwxr-xr-x  build/config.guess  1505
-rwxr-xr-x  build/config.sub  1739
-rwxr-xr-x  build/find_python.sh  37
-rw-r--r--  build/generator/__init__.py  0
-rwxr-xr-x  build/generator/extractor.py  70
-rwxr-xr-x  build/generator/ezt.py  875
-rw-r--r--  build/generator/gen_base.py  1195
-rw-r--r--  build/generator/gen_make.py  585
-rw-r--r--  build/generator/gen_msvc_dsp.py  173
-rw-r--r--  build/generator/gen_vcnet_vcproj.py  280
-rw-r--r--  build/generator/gen_win.py  1684
-rw-r--r--  build/generator/swig/__init__.py  77
-rwxr-xr-x  build/generator/swig/checkout_swig_header.py  85
-rwxr-xr-x  build/generator/swig/external_runtime.py  113
-rwxr-xr-x  build/generator/swig/header_wrappers.py  354
-rw-r--r--  build/generator/templates/build_locale.ezt  36
-rw-r--r--  build/generator/templates/build_zlib.ezt  134
-rw-r--r--  build/generator/templates/makefile.ezt  158
-rw-r--r--  build/generator/templates/msvc_dsp.ezt  95
-rw-r--r--  build/generator/templates/msvc_dsw.ezt  32
-rw-r--r--  build/generator/templates/neon.dsp.ezt  96
-rw-r--r--  build/generator/templates/neon.vcproj.ezt  85
-rw-r--r--  build/generator/templates/neon.vcxproj.ezt  63
-rw-r--r--  build/generator/templates/serf.dsp.ezt  96
-rw-r--r--  build/generator/templates/serf.vcproj.ezt  85
-rw-r--r--  build/generator/templates/serf.vcxproj.ezt  64
-rw-r--r--  build/generator/templates/svn_config.dsp.ezt  97
-rw-r--r--  build/generator/templates/svn_config.vcproj.ezt  94
-rw-r--r--  build/generator/templates/svn_config.vcxproj.ezt  72
-rw-r--r--  build/generator/templates/svn_locale.dsp.ezt  85
-rw-r--r--  build/generator/templates/svn_locale.vcproj.ezt  52
-rw-r--r--  build/generator/templates/svn_locale.vcxproj.ezt  57
-rw-r--r--  build/generator/templates/vcnet_sln.ezt  64
-rw-r--r--  build/generator/templates/vcnet_vc7_sln.ezt  40
-rw-r--r--  build/generator/templates/vcnet_vcproj.ezt  162
-rw-r--r--  build/generator/templates/vcnet_vcxproj.ezt  118
-rw-r--r--  build/generator/templates/vcnet_vcxproj_filters.ezt  47
-rw-r--r--  build/generator/templates/zlib.dsp.ezt  99
-rw-r--r--  build/generator/templates/zlib.vcproj.ezt  85
-rw-r--r--  build/generator/templates/zlib.vcxproj.ezt  63
-rw-r--r--  build/generator/util/__init__.py  0
-rw-r--r--  build/generator/util/executable.py  67
-rw-r--r--  build/get-py-info.py  158
-rwxr-xr-x  build/getversion.py  109
-rw-r--r--  build/hudson/README  20
-rw-r--r--  build/hudson/jobs/subversion-1.6.x-solaris/config.xml  114
-rw-r--r--  build/hudson/jobs/subversion-1.6.x-ubuntu/config.xml  112
-rw-r--r--  build/hudson/jobs/subversion-doxygen/config.xml  102
-rw-r--r--  build/hudson/jobs/subversion-javadoc/config.xml  102
-rw-r--r--  build/hudson/jobs/subversion-trunk-solaris/config.xml  110
-rw-r--r--  build/hudson/jobs/subversion-trunk-ubuntu/config.xml  106
-rwxr-xr-x  build/install-sh  520
-rw-r--r--  build/libtool.m4  7831
-rwxr-xr-x  build/ltmain.sh  9636
-rw-r--r--  build/ltoptions.m4  369
-rw-r--r--  build/ltsugar.m4  123
-rw-r--r--  build/ltversion.m4  23
-rw-r--r--  build/lt~obsolete.m4  98
-rwxr-xr-x  build/run_ctypesgen.sh  89
-rwxr-xr-x  build/run_tests.py  626
-rwxr-xr-x  build/strip-po-charset.py  43
-rwxr-xr-x  build/transform_libtool_scripts.sh  111
-rwxr-xr-x  build/transform_sql.py  181
-rw-r--r--  build/win32/empty.c  2
-rw-r--r--  build/win32/make_dist.conf.template  29
-rw-r--r--  build/win32/make_dist.py  513
-rw-r--r--  build/win32/make_gem.rb  96
-rw-r--r--  build/win32/svn.ico  bin 0 -> 10134 bytes
-rw-r--r--  build/win32/svn.rc  82
-rw-r--r--  build/win32/vc6-build.bat.in  179
90 files changed, 35549 insertions, 0 deletions
diff --git a/build/PrintPath b/build/PrintPath
new file mode 100755
index 0000000..9a46b34
--- /dev/null
+++ b/build/PrintPath
@@ -0,0 +1,136 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# Look for program[s] somewhere in $PATH.
+#
+# Options:
+# -s
+# Do not print out full pathname. (silent)
+# -pPATHNAME
+# Look in PATHNAME instead of $PATH
+#
+# Usage:
+# PrintPath [-s] [-pPATHNAME] program [program ...]
+#
+# Initially written by Jim Jagielski for the Apache configuration mechanism
+# (with kudos to Kernighan/Pike)
+#
+# This script falls under the Apache License.
+# See http://www.apache.org/docs/LICENSE
+
+##
+# Some "constants"
+##
+pathname=$PATH
+echo="yes"
+
+##
+# Find out what OS we are running for later on
+##
+os=`(uname) 2>/dev/null`
+
+##
+# Parse command line
+##
+for args in $*
+do
+ case $args in
+ -s ) echo="no" ;;
+ -p* ) pathname="`echo $args | sed 's/^..//'`" ;;
+ * ) programs="$programs $args" ;;
+ esac
+done
+
+##
+# Now we make the adjustments required for OS/2 and everyone
+# else :)
+#
+# First of all, all OS/2 programs have the '.exe' extension.
+# Next, we adjust PATH (or what was given to us as PATH) to
+# be whitespace separated directories.
+# Finally, we try to determine the best flag to use for
+# test/[] to look for an executable file. OS/2 just has '-r'
+# but with other OSs, we do some funny stuff to check to see
+# if test/[] knows about -x, which is the preferred flag.
+##
+
+if [ "x$os" = "xOS/2" ]
+then
+ ext=".exe"
+ pathname=`echo -E $pathname |
+ sed 's/^;/.;/
+ s/;;/;.;/g
+ s/;$/;./
+ s/;/ /g
+ s/\\\\/\\//g' `
+ test_exec_flag="-r"
+else
+ ext="" # No default extensions
+ pathname=`echo $pathname |
+ sed 's/^:/.:/
+ s/::/:.:/g
+ s/:$/:./
+ s/:/ /g' `
+ # Here is how we test to see if test/[] can handle -x
+ testfile="pp.t.$$"
+
+ cat > $testfile <<ENDTEST
+#!/bin/sh
+if [ -x / ] || [ -x /bin ] || [ -x /bin/ls ]; then
+ exit 0
+fi
+exit 1
+ENDTEST
+
+ if `/bin/sh $testfile 2>/dev/null`; then
+ test_exec_flag="-x"
+ else
+ test_exec_flag="-r"
+ fi
+ rm -f $testfile
+fi
+
+for program in $programs
+do
+ for path in $pathname
+ do
+ if [ $test_exec_flag $path/${program}${ext} ] && \
+ [ ! -d $path/${program}${ext} ]; then
+ if [ "x$echo" = "xyes" ]; then
+ echo $path/${program}${ext}
+ fi
+ exit 0
+ fi
+
+# Next try without extension (if one was used above)
+ if [ "x$ext" != "x" ]; then
+ if [ $test_exec_flag $path/${program} ] && \
+ [ ! -d $path/${program} ]; then
+ if [ "x$echo" = "xyes" ]; then
+ echo $path/${program}
+ fi
+ exit 0
+ fi
+ fi
+ done
+done
+exit 1
+
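Usage note: per the header comments above, PrintPath takes one or more program names, prints the first match it finds, and exits 0 on success or 1 if nothing is found; -s suppresses the printed pathname and -pPATHNAME searches the given colon-separated list instead of $PATH. A couple of hypothetical invocations (the program names and directories are only examples):

    ./build/PrintPath python
    ./build/PrintPath -s -p/usr/local/bin:/usr/bin gcc && echo "gcc found"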
diff --git a/build/ac-macros/apache.m4 b/build/ac-macros/apache.m4
new file mode 100644
index 0000000..196ad79
--- /dev/null
+++ b/build/ac-macros/apache.m4
@@ -0,0 +1,178 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl Macros to find an Apache installation
+dnl
+dnl This will find an installed Apache.
+dnl
+dnl Note: If we don't have an installed Apache, then we can't install the
+dnl (dynamic) mod_dav_svn.so module.
+dnl
+
+AC_DEFUN(SVN_FIND_APACHE,[
+AC_REQUIRE([AC_CANONICAL_HOST])
+
+HTTPD_WANTED_MMN="$1"
+
+AC_MSG_CHECKING(for Apache module support via DSO through APXS)
+AC_ARG_WITH(apxs,
+ [AS_HELP_STRING([[--with-apxs[=FILE]]],
+ [Build shared Apache modules. FILE is the optional
+ pathname to the Apache apxs tool; defaults to
+ "apxs".])],
+[
+ if test "$withval" = "yes"; then
+ APXS=apxs
+ else
+ APXS="$withval"
+ fi
+ APXS_EXPLICIT=1
+])
+
+if test -z "$APXS"; then
+ for i in /usr/sbin /usr/local/apache/bin /usr/local/apache2/bin /usr/bin ; do
+ if test -f "$i/apxs2"; then
+ APXS="$i/apxs2"
+ break
+ fi
+ if test -f "$i/apxs"; then
+ APXS="$i/apxs"
+ break
+ fi
+ done
+fi
+
+if test -n "$APXS" && test "$APXS" != "no"; then
+ APXS_INCLUDE="`$APXS -q INCLUDEDIR`"
+ if test -r $APXS_INCLUDE/mod_dav.h; then
+ AC_MSG_RESULT(found at $APXS)
+
+ AC_MSG_CHECKING([httpd version])
+ AC_EGREP_CPP(VERSION_OKAY,
+ [
+#include "$APXS_INCLUDE/ap_mmn.h"
+#if AP_MODULE_MAGIC_AT_LEAST($HTTPD_WANTED_MMN,0)
+VERSION_OKAY
+#endif],
+ [AC_MSG_RESULT([recent enough])],
+ [AC_MSG_RESULT([apache too old: mmn must be at least $HTTPD_WANTED_MMN])
+ if test "$APXS_EXPLICIT" != ""; then
+ AC_MSG_ERROR([Apache APXS build explicitly requested, but apache version is too old])
+ fi
+ APXS=""
+ ])
+
+ elif test "$APXS_EXPLICIT" != ""; then
+ AC_MSG_ERROR([no - APXS refers to an old version of Apache
+ Unable to locate $APXS_INCLUDE/mod_dav.h])
+ else
+ AC_MSG_RESULT(no - Unable to locate $APXS_INCLUDE/mod_dav.h)
+ APXS=""
+ fi
+else
+ AC_MSG_RESULT(no)
+fi
+
+if test -n "$APXS" && test "$APXS" != "no"; then
+ AC_MSG_CHECKING([whether Apache version is compatible with APR version])
+ apr_major_version="${apr_version%%.*}"
+ case "$apr_major_version" in
+ 0)
+ apache_minor_version_wanted_regex="0"
+ ;;
+ 1)
+ apache_minor_version_wanted_regex=["[1-4]"]
+ ;;
+ 2)
+ apache_minor_version_wanted_regex=["[3-4]"]
+ ;;
+ *)
+ AC_MSG_ERROR([unknown APR version])
+ ;;
+ esac
+ old_CPPFLAGS="$CPPFLAGS"
+ CPPFLAGS="$CPPFLAGS $SVN_APR_INCLUDES"
+ AC_EGREP_CPP([apache_minor_version= *\"$apache_minor_version_wanted_regex\"],
+ [
+#include "$APXS_INCLUDE/ap_release.h"
+apache_minor_version=AP_SERVER_MINORVERSION],
+ [AC_MSG_RESULT([yes])],
+ [AC_MSG_RESULT([no])
+ AC_MSG_ERROR([Apache version incompatible with APR version])])
+ CPPFLAGS="$old_CPPFLAGS"
+fi
+
+AC_ARG_WITH(apache-libexecdir,
+ [AS_HELP_STRING([[--with-apache-libexecdir[=PATH]]],
+ [Install Apache modules to PATH instead of Apache's
+ configured modules directory; PATH "no"
+ or --without-apache-libexecdir means install
+ to LIBEXECDIR.])],
+[
+ APACHE_LIBEXECDIR="$withval"
+])
+
+if test -n "$APXS" && test "$APXS" != "no"; then
+ APXS_CC="`$APXS -q CC`"
+ APACHE_INCLUDES="$APACHE_INCLUDES -I$APXS_INCLUDE"
+
+ if test -z "$APACHE_LIBEXECDIR"; then
+ APACHE_LIBEXECDIR="`$APXS -q libexecdir`"
+ elif test "$APACHE_LIBEXECDIR" = 'no'; then
+ APACHE_LIBEXECDIR="$libexecdir"
+ fi
+
+ BUILD_APACHE_RULE=apache-mod
+ INSTALL_APACHE_RULE=install-mods-shared
+
+ case $host in
+ *-*-cygwin*)
+ APACHE_LDFLAGS="-shrext .so"
+ ;;
+ esac
+else
+ echo "=================================================================="
+ echo "WARNING: skipping the build of mod_dav_svn"
+ echo " try using --with-apxs"
+ echo "=================================================================="
+fi
+
+AC_SUBST(APXS)
+AC_SUBST(APACHE_LDFLAGS)
+AC_SUBST(APACHE_INCLUDES)
+AC_SUBST(APACHE_LIBEXECDIR)
+
+# there aren't any flags that interest us ...
+#if test -n "$APXS" && test "$APXS" != "no"; then
+# CFLAGS="$CFLAGS `$APXS -q CFLAGS CFLAGS_SHLIB`"
+#fi
+
+if test -n "$APXS_CC" && test "$APXS_CC" != "$CC" ; then
+ echo "=================================================================="
+ echo "WARNING: You have chosen to compile Subversion with a different"
+ echo " compiler than the one used to compile Apache."
+ echo ""
+ echo " Current compiler: $CC"
+ echo " Apache's compiler: $APXS_CC"
+ echo ""
+ echo "This could cause some problems."
+ echo "=================================================================="
+fi
+
+])
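Usage note: the two options declared above are what enable the mod_dav_svn build. A hypothetical configure invocation, with made-up paths, might look like:

    ./configure --with-apxs=/usr/local/apache2/bin/apxs \
                --with-apache-libexecdir=/usr/local/apache2/modules

Without --with-apxs the macro only emits the "skipping the build of mod_dav_svn" warning shown above.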
diff --git a/build/ac-macros/apr.m4 b/build/ac-macros/apr.m4
new file mode 100644
index 0000000..6085717
--- /dev/null
+++ b/build/ac-macros/apr.m4
@@ -0,0 +1,151 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_APR(wanted_regex, alt_wanted_regex)
+dnl
+dnl 'wanted_regex' and 'alt_wanted_regex' are regular expressions
+dnl that the apr version string must match.
+dnl
+dnl Check configure options and assign variables related to
+dnl the Apache Portable Runtime (APR) library.
+dnl
+
+AC_DEFUN(SVN_LIB_APR,
+[
+ APR_WANTED_REGEXES="$1"
+
+ AC_MSG_NOTICE([Apache Portable Runtime (APR) library configuration])
+
+ APR_FIND_APR("$abs_srcdir/apr", "$abs_builddir/apr", 1, [1 0])
+
+ if test $apr_found = "no"; then
+ AC_MSG_WARN([APR not found])
+ SVN_DOWNLOAD_APR
+ fi
+
+ if test $apr_found = "reconfig"; then
+ SVN_EXTERNAL_PROJECT([apr])
+ fi
+
+ dnl check APR version number against regex
+
+ AC_MSG_CHECKING([APR version])
+ apr_version="`$apr_config --version`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --version failed])
+ fi
+ AC_MSG_RESULT([$apr_version])
+
+ APR_WANTED_REGEX_MATCH=0
+ for apr_wanted_regex in $APR_WANTED_REGEXES; do
+ if test `expr $apr_version : $apr_wanted_regex` -ne 0; then
+ APR_WANTED_REGEX_MATCH=1
+ break
+ fi
+ done
+
+ if test $APR_WANTED_REGEX_MATCH -eq 0; then
+ echo "wanted regexes are $APR_WANTED_REGEXES"
+ AC_MSG_ERROR([invalid apr version found])
+ fi
+
+ dnl Get build information from APR
+
+ CPPFLAGS="$CPPFLAGS `$apr_config --cppflags`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --cppflags failed])
+ fi
+
+ CFLAGS="$CFLAGS `$apr_config --cflags`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --cflags failed])
+ fi
+
+ apr_ldflags="`$apr_config --ldflags`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --ldflags failed])
+ fi
+ LDFLAGS="$LDFLAGS `SVN_REMOVE_STANDARD_LIB_DIRS($apr_ldflags)`"
+
+ SVN_APR_INCLUDES="`$apr_config --includes`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --includes failed])
+ fi
+
+ SVN_APR_PREFIX="`$apr_config --prefix`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --prefix failed])
+ fi
+
+ if test "$enable_all_static" = "yes"; then
+ SVN_APR_LIBS="`$apr_config --link-ld --libs`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --link-ld --libs failed])
+ fi
+ else
+ SVN_APR_LIBS="`$apr_config --link-ld`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --link-ld failed])
+ fi
+ fi
+ SVN_APR_LIBS="`SVN_REMOVE_STANDARD_LIB_DIRS($SVN_APR_LIBS)`"
+
+ SVN_APR_SHLIB_PATH_VAR="`$apr_config --shlib-path-var`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apr-config --shlib-path-var failed])
+ fi
+
+ AC_SUBST(SVN_APR_PREFIX)
+ AC_SUBST(SVN_APR_CONFIG, ["$apr_config"])
+ AC_SUBST(SVN_APR_INCLUDES)
+ AC_SUBST(SVN_APR_LIBS)
+ AC_SUBST(SVN_APR_SHLIB_PATH_VAR)
+])
+
+dnl SVN_DOWNLOAD_APR()
+dnl no apr found, print out a message telling the user what to do
+AC_DEFUN(SVN_DOWNLOAD_APR,
+[
+ echo "The Apache Portable Runtime (APR) library cannot be found."
+ echo "Please install APR on this system and supply the appropriate"
+ echo "--with-apr option to 'configure'"
+ echo ""
+ echo "or"
+ echo ""
+ echo "get it with SVN and put it in a subdirectory of this source:"
+ echo ""
+ echo " svn co \\"
+ echo " http://svn.apache.org/repos/asf/apr/apr/branches/1.3.x \\"
+ echo " apr"
+ echo ""
+ echo "Run that right here in the top level of the Subversion tree."
+ echo "Afterwards, run apr/buildconf in that subdirectory and"
+ echo "then run configure again here."
+ echo ""
+ echo "Whichever of the above you do, you probably need to do"
+ echo "something similar for apr-util, either providing both"
+ echo "--with-apr and --with-apr-util to 'configure', or"
+ echo "getting both from SVN with:"
+ echo ""
+ echo " svn co \\"
+ echo " http://svn.apache.org/repos/asf/apr/apr-util/branches/1.3.x \\"
+ echo " apr-util"
+ echo ""
+ AC_MSG_ERROR([no suitable apr found])
+])
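Usage note: the version gate above relies on `expr STRING : REGEX`, which prints the number of characters matched. A minimal standalone sketch, with a hypothetical version string and wanted regex (the real regexes are passed to SVN_LIB_APR as its first argument):

    apr_version="1.4.6"
    if test `expr $apr_version : "1\.[0-9]*\.[0-9]*"` -ne 0; then
      echo "APR $apr_version is acceptable"
    fi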
diff --git a/build/ac-macros/apr_memcache.m4 b/build/ac-macros/apr_memcache.m4
new file mode 100644
index 0000000..384a0e0
--- /dev/null
+++ b/build/ac-macros/apr_memcache.m4
@@ -0,0 +1,98 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_APR_MEMCACHE
+dnl
+dnl Check configure options and assign variables related to
+dnl the apr_memcache client library.
+dnl Sets svn_lib_apr_memcache to "yes" if memcache code is accessible
+dnl either from the standalone apr_memcache library or from apr-util.
+dnl
+
+AC_DEFUN(SVN_LIB_APR_MEMCACHE,
+[
+ apr_memcache_found=no
+
+ AC_ARG_WITH(apr_memcache,AC_HELP_STRING([--with-apr_memcache=PREFIX],
+ [Standalone apr_memcache client library]),
+ [
+ if test "$withval" = "yes" ; then
+ AC_MSG_ERROR([--with-apr_memcache requires an argument.])
+ else
+ AC_MSG_NOTICE([looking for separate apr_memcache package])
+ apr_memcache_prefix=$withval
+ save_cppflags="$CPPFLAGS"
+ CPPFLAGS="$CPPFLAGS $SVN_APR_INCLUDES $SVN_APRUTIL_INCLUDES -I$apr_memcache_prefix/include/apr_memcache-0"
+ AC_CHECK_HEADER(apr_memcache.h,[
+ save_ldflags="$LDFLAGS"
+ LDFLAGS="$LDFLAGS -L$apr_memcache_prefix/lib"
+ AC_CHECK_LIB(apr_memcache, apr_memcache_create,
+ [apr_memcache_found="standalone"])
+ LDFLAGS="$save_ldflags"])
+ CPPFLAGS="$save_cppflags"
+ fi
+ ], [
+ if test -d "$srcdir/apr_memcache"; then
+ apr_memcache_found=reconfig
+ else
+dnl Try just looking in apr-util (>= 1.3 has it already).
+ AC_MSG_NOTICE([looking for apr_memcache as part of apr-util])
+ save_cppflags="$CPPFLAGS"
+ CPPFLAGS="$CPPFLAGS $SVN_APR_INCLUDES $SVN_APRUTIL_INCLUDES"
+ AC_CHECK_HEADER(apr_memcache.h,[
+ save_ldflags="$LDFLAGS"
+ LDFLAGS="$LDFLAGS $SVN_APRUTIL_LIBS"
+ AC_CHECK_LIB(aprutil-1, apr_memcache_create,
+ [apr_memcache_found="aprutil"])
+ LDFLAGS="$save_ldflags"])
+ CPPFLAGS="$save_cppflags"
+
+ fi
+ ])
+
+
+ if test $apr_memcache_found = "reconfig"; then
+ SVN_EXTERNAL_PROJECT([apr_memcache], [--with-apr=$apr_config --with-apr-util=$apu_config])
+ apr_memcache_prefix=$prefix
+ SVN_APR_MEMCACHE_PREFIX="$apr_memcache_prefix"
+ SVN_APR_MEMCACHE_INCLUDES="-I$srcdir/memcache"
+ SVN_APR_MEMCACHE_LIBS="$abs_builddir/memcache/libapr_memcache.la"
+ fi
+
+ if test $apr_memcache_found = "standalone"; then
+ SVN_APR_MEMCACHE_PREFIX="$apr_memcache_prefix"
+ SVN_APR_MEMCACHE_INCLUDES="-I$apr_memcache_prefix/include/apr_memcache-0"
+ SVN_APR_MEMCACHE_LIBS="$apr_memcache_prefix/lib/libapr_memcache.la"
+ svn_lib_apr_memcache=yes
+ elif test $apr_memcache_found = "aprutil"; then
+dnl We are already linking apr-util everywhere, so no special treatment needed.
+ SVN_APR_MEMCACHE_PREFIX=""
+ SVN_APR_MEMCACHE_INCLUDES=""
+ SVN_APR_MEMCACHE_LIBS=""
+ svn_lib_apr_memcache=yes
+ elif test $apr_memcache_found = "reconfig"; then
+ svn_lib_apr_memcache=yes
+ else
+ svn_lib_apr_memcache=no
+ fi
+
+ AC_SUBST(SVN_APR_MEMCACHE_PREFIX)
+ AC_SUBST(SVN_APR_MEMCACHE_INCLUDES)
+ AC_SUBST(SVN_APR_MEMCACHE_LIBS)
+])
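Usage note: a standalone apr_memcache install is selected with a prefix (hypothetical path below); the macro then expects PREFIX/include/apr_memcache-0/apr_memcache.h and PREFIX/lib/libapr_memcache.la, and with no option it falls back to the copy shipped in apr-util 1.3 and later.

    ./configure --with-apr_memcache=/usr/local/apr_memcache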
diff --git a/build/ac-macros/aprutil.m4 b/build/ac-macros/aprutil.m4
new file mode 100644
index 0000000..54f5e32
--- /dev/null
+++ b/build/ac-macros/aprutil.m4
@@ -0,0 +1,146 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_APRUTIL(wanted_regex, alt_wanted_regex)
+dnl
+dnl 'wanted_regex' and 'alt_wanted_regex' are regular expressions
+dnl that the aprutil version string must match.
+dnl
+dnl Check configure options and assign variables related to
+dnl the Apache Portable Runtime Utilities (APRUTIL) library.
+dnl
+dnl If there is an apr-util source directory, there *must* be a
+dnl corresponding apr source directory. APRUTIL's build system
+dnl is too tied in with apr. (You can't use an installed APR and
+dnl a source APR-util.)
+dnl
+
+
+AC_DEFUN(SVN_LIB_APRUTIL,
+[
+ APRUTIL_WANTED_REGEXES="$1"
+
+ AC_MSG_NOTICE([Apache Portable Runtime Utility (APRUTIL) library configuration])
+
+ APR_FIND_APU("$abs_srcdir/apr-util", "$abs_builddir/apr-util", 1, [1 0])
+
+ if test $apu_found = "no"; then
+ AC_MSG_WARN([APRUTIL not found])
+ SVN_DOWNLOAD_APRUTIL
+ fi
+
+ if test $apu_found = "reconfig"; then
+ SVN_EXTERNAL_PROJECT([apr-util], [--with-apr=../apr])
+ fi
+
+ dnl check APRUTIL version number against regex
+
+ AC_MSG_CHECKING([APR-UTIL version])
+ apu_version="`$apu_config --version`"
+ if test $? -ne 0; then
+ # This is a hack as suggested by Ben Collins-Sussman. It can be
+ # removed after apache 2.0.44 has been released. (The apu-config
+ # shipped in 2.0.43 contains a correct version number, but
+ # stupidly doesn't understand the --version switch.)
+ apu_version=`grep "APRUTIL_DOTTED_VERSION=" $(which $apu_config) | tr -d "APRUTIL_DOTTED_VERSION="| tr -d '"'`
+ #AC_MSG_ERROR([
+ # apu-config --version failed.
+ # Your apu-config doesn't support the --version switch, please upgrade
+ # to APR-UTIL more recent than 2002-Nov-05.])
+ fi
+ AC_MSG_RESULT([$apu_version])
+
+ APU_WANTED_REGEX_MATCH=0
+ for apu_wanted_regex in $APRUTIL_WANTED_REGEXES; do
+ if test `expr $apu_version : $apu_wanted_regex` -ne 0; then
+ APU_WANTED_REGEX_MATCH=1
+ break
+ fi
+ done
+
+ if test $APU_WANTED_REGEX_MATCH -eq 0; then
+ echo "wanted regexes are $APRUTIL_WANTED_REGEXES"
+ AC_MSG_ERROR([invalid apr-util version found])
+ fi
+
+ dnl Get libraries and thread flags from APRUTIL ---------------------
+
+ apu_ldflags="`$apu_config --ldflags`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apu-config --ldflags failed])
+ fi
+ LDFLAGS="$LDFLAGS `SVN_REMOVE_STANDARD_LIB_DIRS($apu_ldflags)`"
+
+ SVN_APRUTIL_INCLUDES="`$apu_config --includes`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apu-config --includes failed])
+ fi
+
+ SVN_APRUTIL_PREFIX="`$apu_config --prefix`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apu-config --prefix failed])
+ fi
+
+ if test "$enable_all_static" = "yes"; then
+ SVN_APRUTIL_LIBS="`$apu_config --link-ld --libs`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apu-config --link-ld --libs failed])
+ fi
+ else
+ SVN_APRUTIL_LIBS="`$apu_config --link-ld`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([apu-config --link-ld failed])
+ fi
+ fi
+ SVN_APRUTIL_LIBS="`SVN_REMOVE_STANDARD_LIB_DIRS($SVN_APRUTIL_LIBS)`"
+
+ AC_SUBST(SVN_APRUTIL_INCLUDES)
+ AC_SUBST(SVN_APRUTIL_CONFIG, ["$apu_config"])
+ AC_SUBST(SVN_APRUTIL_LIBS)
+ AC_SUBST(SVN_APRUTIL_PREFIX)
+
+ dnl What version of Expat are we using? -----------------
+ SVN_HAVE_OLD_EXPAT="`$apu_config --old-expat`"
+ if test "$SVN_HAVE_OLD_EXPAT" = "yes"; then
+ AC_DEFINE(SVN_HAVE_OLD_EXPAT, 1, [Defined if Expat 1.0 or 1.1 was found])
+ fi
+])
+
+dnl SVN_DOWNLOAD_APRUTIL()
+dnl no apr-util found, print out a message telling the user what to do
+AC_DEFUN(SVN_DOWNLOAD_APRUTIL,
+[
+ echo "The Apache Portable Runtime Utility (APRUTIL) library cannot be found."
+ echo "Either install APRUTIL on this system and supply the appropriate"
+ echo "--with-apr-util option"
+ echo ""
+ echo "or"
+ echo ""
+ echo "get it with SVN and put it in a subdirectory of this source:"
+ echo ""
+ echo " svn co \\"
+ echo " http://svn.apache.org/repos/asf/apr/apr-util/branches/1.3.x \\"
+ echo " apr-util"
+ echo ""
+ echo "Run that right here in the top level of the Subversion tree."
+ echo "Afterwards, run apr-util/buildconf in that subdirectory and"
+ echo "then run configure again here."
+ echo ""
+ AC_MSG_ERROR([no suitable APRUTIL found])
+])
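Usage note: as the messages above spell out, an installed APR and APR-util are normally supplied to configure together (hypothetical prefix):

    ./configure --with-apr=/usr/local/apr --with-apr-util=/usr/local/apr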
diff --git a/build/ac-macros/berkeley-db.m4 b/build/ac-macros/berkeley-db.m4
new file mode 100644
index 0000000..b507d51
--- /dev/null
+++ b/build/ac-macros/berkeley-db.m4
@@ -0,0 +1,246 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_BERKELEY_DB(major, minor, patch)
+dnl
+dnl Compare if the Berkeley DB specified by user or provided by APR-UTIL
+dnl is no older than the version given by MAJOR, MINOR, and PATCH.
+dnl
+dnl If we find a useable version, set the shell variable
+dnl `svn_lib_berkeley_db' to `yes'. Otherwise, set `svn_lib_berkeley_db'
+dnl to `no'.
+dnl
+dnl This macro also checks for the `--with-berkeley-db=ARG' flag;
+dnl if given, the macro will use the ARG specified, and the
+dnl configuration script will die if it can't find the library. If
+dnl the user gives the `--without-berkeley-db' flag, the entire
+dnl search is skipped.
+
+
+AC_DEFUN(SVN_LIB_BERKELEY_DB,
+[
+ db_version=$1.$2.$3
+ dnl Process the `with-berkeley-db' switch. We set `status' to one
+ dnl of the following values:
+ dnl `required' --- the user specified that they did want to use
+ dnl Berkeley DB, so abort the configuration if we cannot find it.
+ dnl `try-link' --- See if APR-UTIL supplies the correct DB version;
+ dnl if it doesn't, just do not build the bdb based filesystem.
+ dnl `skip' --- Do not look for Berkeley DB, and do not build the
+ dnl bdb based filesystem.
+
+ AC_ARG_WITH(berkeley-db, [AS_HELP_STRING(
+ [[--with-berkeley-db[=HEADER:INCLUDES:LIB_SEARCH_DIRS:LIBS]]], [
+ The Subversion Berkeley DB based filesystem library
+ requires Berkeley DB $db_version or newer. If you
+ specify `--without-berkeley-db', that library will
+ not be built. If you omit the argument of this option
+ completely, the configure script will use the Berkeley DB
+ used by APR-UTIL.])],
+ [
+ if test "$withval" = "no"; then
+ status=skip
+ elif test "$withval" = "yes"; then
+ apu_db_version="`$apu_config --db-version`"
+ if test $? -ne 0; then
+ AC_MSG_ERROR([Can't determine whether apr-util is linked against a
+ proper version of Berkeley DB.])
+ fi
+
+ if test "$withval" = "yes"; then
+ if test "$apu_db_version" -lt "4"; then
+ AC_MSG_ERROR([APR-UTIL was linked against Berkeley DB version $apu_db_version,
+ while version 4 or higher is required. Reinstall
+ APR-UTIL with the appropriate options.])
+ fi
+
+ status=required
+
+ elif test "$apu_found" != "reconfig"; then
+ if test "$apu_db_version" -lt 4; then
+ AC_MSG_ERROR([APR-UTIL was installed independently, it won't be
+ possible to use the specified Berkeley DB: $withval])
+ fi
+
+ status=required
+ fi
+ else
+ if echo "$withval" | $EGREP ":.*:.*:" > /dev/null; then
+ svn_berkeley_db_header=["`echo "$withval" | $SED -e "s/\([^:]*\):.*/\1/"`"]
+ SVN_DB_INCLUDES=""
+ for i in [`echo "$withval" | $SED -e "s/.*:\([^:]*\):[^:]*:.*/\1/"`]; do
+ SVN_DB_INCLUDES="$SVN_DB_INCLUDES -I$i"
+ done
+ SVN_DB_INCLUDES="${SVN_DB_INCLUDES## }"
+ for l in [`echo "$withval" | $SED -e "s/.*:[^:]*:\([^:]*\):.*/\1/"`]; do
+ LDFLAGS="$LDFLAGS -L$l"
+ done
+ SVN_DB_LIBS=""
+ for l in [`echo "$withval" | $SED -e "s/.*:\([^:]*\)/\1/"`]; do
+ SVN_DB_LIBS="$SVN_DB_LIBS -l$l"
+ done
+ SVN_DB_LIBS="${SVN_DB_LIBS## }"
+
+ status=required
+ else
+ AC_MSG_ERROR([Invalid syntax of argument of --with-berkeley-db option])
+ fi
+ fi
+ ],
+ [
+ # No --with-berkeley-db option:
+ #
+ # Check if APR-UTIL is providing the correct Berkeley DB version
+ # for us.
+ #
+ apu_db_version="`$apu_config --db-version`"
+ if test $? -ne 0; then
+ AC_MSG_WARN([Detected older version of APR-UTIL, trying to determine
+ whether apr-util is linked against Berkeley DB
+ $db_version])
+ status=try-link
+ elif test "$apu_db_version" -lt "4"; then
+ status=skip
+ else
+ status=try-link
+ fi
+ ])
+
+ if test "$status" = "skip"; then
+ svn_lib_berkeley_db=no
+ else
+ AC_MSG_CHECKING([for availability of Berkeley DB])
+ SVN_LIB_BERKELEY_DB_TRY($1, $2, $3)
+ if test "$svn_have_berkeley_db" = "yes"; then
+ AC_MSG_RESULT([yes])
+ svn_lib_berkeley_db=yes
+ else
+ AC_MSG_RESULT([no])
+ svn_lib_berkeley_db=no
+ if test "$status" = "required"; then
+ AC_MSG_ERROR([Berkeley DB $db_version or newer wasn't found.])
+ fi
+ fi
+ fi
+])
+
+
+dnl SVN_LIB_BERKELEY_DB_TRY(major, minor, patch)
+dnl
+dnl A subroutine of SVN_LIB_BERKELEY_DB.
+dnl
+dnl Check that a new-enough version of Berkeley DB is installed.
+dnl "New enough" means no older than the version given by MAJOR,
+dnl MINOR, and PATCH. The result of the test is not cached; no
+dnl messages are printed.
+dnl
+dnl Set the shell variable `svn_have_berkeley_db' to `yes' if we found
+dnl an appropriate version, or `no' otherwise.
+dnl
+dnl This macro uses the Berkeley DB library function `db_version' to
+dnl find the version. If the Berkeley DB library doesn't have this
+dnl function, then this macro assumes it is too old.
+
+dnl NOTE: This is pretty messed up. It seems that the FreeBSD port of
+dnl Berkeley DB 4 puts the header file in /usr/local/include/db4, but the
+dnl database library in /usr/local/lib, as libdb4.[a|so]. There is no
+dnl /usr/local/include/db.h. So if you check for /usr/local first, you'll
+dnl get the old header file from /usr/include, and the new library from
+dnl /usr/local/lib. Disaster. Thus this test compares the version constants
+dnl in the db.h header with the ones returned by db_version().
+
+
+AC_DEFUN(SVN_LIB_BERKELEY_DB_TRY,
+ [
+ svn_lib_berkeley_db_try_save_cppflags="$CPPFLAGS"
+ svn_lib_berkeley_db_try_save_libs="$LIBS"
+
+ svn_check_berkeley_db_major=$1
+ svn_check_berkeley_db_minor=$2
+ svn_check_berkeley_db_patch=$3
+
+ # Extract only the -ldb.* flag from the libs supplied by apu-config
+ # Otherwise we get bit by the fact that expat might not be built yet
+ # Or that it resides in a non-standard location which we would have
+ # to compensate with using something like -R`$apu_config --prefix`/lib.
+ #
+ SVN_DB_LIBS=["${SVN_DB_LIBS-`$apu_config --libs | $SED -e 's/.*\(-ldb[^[:space:]]*\).*/\1/' | $EGREP -- '-ldb[^[:space:]]*'`}"]
+
+ CPPFLAGS="$SVN_DB_INCLUDES $SVN_APRUTIL_INCLUDES $CPPFLAGS"
+ LIBS="`$apu_config --ldflags` $SVN_DB_LIBS $LIBS"
+
+ if test -n "$svn_berkeley_db_header"; then
+ SVN_DB_HEADER="#include <$svn_berkeley_db_header>"
+ svn_db_header="#include <$svn_berkeley_db_header>"
+ else
+ SVN_DB_HEADER="#include <apu_want.h>"
+ svn_db_header="#define APU_WANT_DB
+#include <apu_want.h>"
+ fi
+
+ AH_BOTTOM(
+#ifdef SVN_WANT_BDB
+#define APU_WANT_DB
+@SVN_DB_HEADER@
+#endif
+)
+
+ AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <stdlib.h>
+$svn_db_header
+
+int main ()
+{
+ int major, minor, patch;
+
+ db_version (&major, &minor, &patch);
+
+ /* Sanity check: ensure that db.h constants actually match the db library */
+ if (major != DB_VERSION_MAJOR
+ || minor != DB_VERSION_MINOR
+ || patch != DB_VERSION_PATCH)
+ exit (1);
+
+ /* Run-time check: ensure the library claims to be the correct version. */
+
+ if (major < $svn_check_berkeley_db_major)
+ exit (1);
+ if (major > $svn_check_berkeley_db_major)
+ exit (0);
+
+ if (minor < $svn_check_berkeley_db_minor)
+ exit (1);
+ if (minor > $svn_check_berkeley_db_minor)
+ exit (0);
+
+ if (patch >= $svn_check_berkeley_db_patch)
+ exit (0);
+ else
+ exit (1);
+}
+ ]])],
+ [svn_have_berkeley_db=yes],
+ [svn_have_berkeley_db=no],
+ [svn_have_berkeley_db=yes]
+ )
+
+ CPPFLAGS="$svn_lib_berkeley_db_try_save_cppflags"
+ LIBS="$svn_lib_berkeley_db_try_save_libs"
+ ]
+)
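Usage note: besides plain --with-berkeley-db / --without-berkeley-db, the macro parses the HEADER:INCLUDES:LIB_SEARCH_DIRS:LIBS form shown in its help string. A hypothetical value matching the FreeBSD layout described in the note above:

    ./configure --with-berkeley-db=db4/db.h:/usr/local/include:/usr/local/lib:db4

This makes the build include <db4/db.h>, add -I/usr/local/include and -L/usr/local/lib, and link with -ldb4.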
diff --git a/build/ac-macros/ctypesgen.m4 b/build/ac-macros/ctypesgen.m4
new file mode 100644
index 0000000..e78db7e
--- /dev/null
+++ b/build/ac-macros/ctypesgen.m4
@@ -0,0 +1,78 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl ctypesgen.m4: Locates ctypesgen for building/installing ctypes-python.
+dnl
+
+AC_DEFUN(SVN_CHECK_CTYPESGEN,
+[
+ AC_ARG_WITH(ctypesgen,
+ AS_HELP_STRING([--with-ctypesgen=PATH],
+ [Specify the path to ctypesgen. This can either
+ be the full path to a ctypesgen installation,
+ the full path to a ctypesgen source tree or the
+ full path to ctypesgen.py.]),
+ [
+ case "$withval" in
+ "no")
+ SVN_FIND_CTYPESGEN(no)
+ ;;
+ "yes")
+ SVN_FIND_CTYPESGEN(check)
+ ;;
+ *)
+ SVN_FIND_CTYPESGEN($withval)
+ ;;
+ esac
+ ],
+ [
+ SVN_FIND_CTYPESGEN(check)
+ ])
+])
+
+AC_DEFUN(SVN_FIND_CTYPESGEN,
+[
+ where=$1
+
+ CTYPESGEN=none
+
+ if test $where = check; then
+ AC_PATH_PROG(CTYPESGEN, "ctypesgen.py", none)
+ elif test $where != no; then
+ AC_MSG_CHECKING([for ctypesgen.py])
+
+ if test -f "$where"; then
+ CTYPESGEN="$where"
+ elif test -f "$where/bin/ctypesgen.py"; then
+ CTYPESGEN="$where/bin/ctypesgen.py"
+ else
+ CTYPESGEN="$where/ctypesgen.py"
+ fi
+
+ if test ! -f "$CTYPESGEN" || test ! -x "$CTYPESGEN"; then
+ AC_MSG_ERROR([Could not find ctypesgen at $where/ctypesgen.py or at
+ $where/bin/ctypesgen.py])
+ else
+ AC_MSG_RESULT([$CTYPESGEN])
+ fi
+ fi
+
+ dnl We use CTYPESGEN in the Makefile
+ AC_SUBST(CTYPESGEN)
+])
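Usage note: per the help string above, --with-ctypesgen accepts an install prefix, a source tree, or the script itself (the paths below are hypothetical):

    ./configure --with-ctypesgen=/usr/local                        # uses /usr/local/bin/ctypesgen.py
    ./configure --with-ctypesgen=/opt/src/ctypesgen                # uses /opt/src/ctypesgen/ctypesgen.py
    ./configure --with-ctypesgen=/opt/src/ctypesgen/ctypesgen.py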
diff --git a/build/ac-macros/find_apr.m4 b/build/ac-macros/find_apr.m4
new file mode 100644
index 0000000..f5b4e06
--- /dev/null
+++ b/build/ac-macros/find_apr.m4
@@ -0,0 +1,168 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl find_apr.m4 : locate the APR include files and libraries
+dnl
+dnl This macro file can be used by applications to find and use the APR
+dnl library. It provides a standardized mechanism for using APR. It supports
+dnl embedding APR into the application source, or locating an installed
+dnl copy of APR.
+dnl
+dnl APR_FIND_APR(srcdir, builddir, implicit-install-check, acceptable-majors)
+dnl
+dnl where srcdir is the location of the bundled APR source directory, or
+dnl empty if source is not bundled.
+dnl
+dnl where builddir is the location where the bundled APR will be built,
+dnl or empty if the build will occur in the srcdir.
+dnl
+dnl where implicit-install-check set to 1 indicates if there is no
+dnl --with-apr option specified, we will look for installed copies.
+dnl
+dnl where acceptable-majors is a space separated list of acceptable major
+dnl version numbers. Often only a single major version will be acceptable.
+dnl If multiple versions are specified, and --with-apr=PREFIX or the
+dnl implicit installed search are used, then the first (leftmost) version
+dnl in the list that is found will be used. Currently defaults to [0 1].
+dnl
+dnl Sets the following variables on exit:
+dnl
+dnl apr_found : "yes", "no", "reconfig"
+dnl
+dnl apr_config : If the apr-config tool exists, this refers to it. If
+dnl apr_found is "reconfig", then the bundled directory
+dnl should be reconfigured *before* using apr_config.
+dnl
+dnl Note: this macro file assumes that apr-config has been installed; it
+dnl is normally considered a required part of an APR installation.
+dnl
+dnl If a bundled source directory is available and needs to be (re)configured,
+dnl then apr_found is set to "reconfig". The caller should reconfigure the
+dnl (passed-in) source directory, placing the result in the build directory,
+dnl as appropriate.
+dnl
+dnl If apr_found is "yes" or "reconfig", then the caller should use the
+dnl value of apr_config to fetch any necessary build/link information.
+dnl
+
+AC_DEFUN([APR_FIND_APR], [
+ apr_found="no"
+
+ if test "$ac_cv_emxos2" = "yes"; then
+ # Scripts don't pass test -x on OS/2
+ TEST_X="test -f"
+ else
+ TEST_X="test -x"
+ fi
+
+ ifelse([$4], [], [
+ ifdef(AC_WARNING,AC_WARNING([$0: missing argument 4 (acceptable-majors): Defaulting to APR 0.x then APR 1.x]))
+ acceptable_majors="0 1"],
+ [acceptable_majors="$4"])
+
+ apr_temp_acceptable_apr_config=""
+ for apr_temp_major in $acceptable_majors
+ do
+ case $apr_temp_major in
+ 0)
+ apr_temp_acceptable_apr_config="$apr_temp_acceptable_apr_config apr-config"
+ ;;
+ *)
+ apr_temp_acceptable_apr_config="$apr_temp_acceptable_apr_config apr-$apr_temp_major-config"
+ ;;
+ esac
+ done
+
+ AC_MSG_CHECKING(for APR)
+ AC_ARG_WITH(apr,
+ [ --with-apr=PATH prefix for installed APR, path to APR build tree,
+ or the full path to apr-config],
+ [
+ if test "$withval" = "no" || test "$withval" = "yes"; then
+ AC_MSG_ERROR([--with-apr requires a directory or file to be provided])
+ fi
+
+ for apr_temp_apr_config_file in $apr_temp_acceptable_apr_config
+ do
+ for lookdir in "$withval/bin" "$withval"
+ do
+ if $TEST_X "$lookdir/$apr_temp_apr_config_file"; then
+ apr_found="yes"
+ apr_config="$lookdir/$apr_temp_apr_config_file"
+ break 2
+ fi
+ done
+ done
+
+ if test "$apr_found" != "yes" && $TEST_X "$withval" && $withval --help > /dev/null 2>&1 ; then
+ apr_found="yes"
+ apr_config="$withval"
+ fi
+
+ dnl if --with-apr is used, it is a fatal error for its argument
+ dnl to be invalid
+ if test "$apr_found" != "yes"; then
+ AC_MSG_ERROR([the --with-apr parameter is incorrect. It must specify an install prefix, a build directory, or an apr-config file.])
+ fi
+ ],[
+ dnl if we have a bundled source directory, use it
+ if test -d "$1"; then
+ apr_temp_abs_srcdir="`cd $1 && pwd`"
+ apr_found="reconfig"
+ apr_bundled_major="`sed -n '/#define.*APR_MAJOR_VERSION/s/^[^0-9]*\([0-9]*\).*$/\1/p' \"$1/include/apr_version.h\"`"
+ case $apr_bundled_major in
+ "")
+ AC_MSG_ERROR([failed to find major version of bundled APR])
+ ;;
+ 0)
+ apr_temp_apr_config_file="apr-config"
+ ;;
+ *)
+ apr_temp_apr_config_file="apr-$apr_bundled_major-config"
+ ;;
+ esac
+ if test -n "$2"; then
+ apr_config="$2/$apr_temp_apr_config_file"
+ else
+ apr_config="$1/$apr_temp_apr_config_file"
+ fi
+ fi
+ if test "$apr_found" = "no" && test -n "$3" && test "$3" = "1"; then
+ for apr_temp_apr_config_file in $apr_temp_acceptable_apr_config
+ do
+ if $apr_temp_apr_config_file --help > /dev/null 2>&1 ; then
+ apr_found="yes"
+ apr_config="$apr_temp_apr_config_file"
+ break
+ else
+ dnl look in some standard places (apparently not in builtin/default)
+ for lookdir in /usr /usr/local /opt/apr /usr/local/apache2 ; do
+ if $TEST_X "$lookdir/bin/$apr_temp_apr_config_file"; then
+ apr_found="yes"
+ apr_config="$lookdir/bin/$apr_temp_apr_config_file"
+ break 2
+ fi
+ done
+ fi
+ done
+ fi
+ ])
+
+ AC_MSG_RESULT($apr_found)
+])
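Usage note: the --with-apr switch defined above takes any of the three forms named in its help text (hypothetical paths):

    ./configure --with-apr=/usr/local/apr                          # install prefix
    ./configure --with-apr=$HOME/build/apr                         # APR build tree
    ./configure --with-apr=/usr/local/apr/bin/apr-1-config         # the apr-config script itself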
diff --git a/build/ac-macros/find_apu.m4 b/build/ac-macros/find_apu.m4
new file mode 100644
index 0000000..84286db
--- /dev/null
+++ b/build/ac-macros/find_apu.m4
@@ -0,0 +1,178 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl find_apu.m4 : locate the APR-util (APU) include files and libraries
+dnl
+dnl This macro file can be used by applications to find and use the APU
+dnl library. It provides a standardized mechanism for using APU. It supports
+dnl embedding APU into the application source, or locating an installed
+dnl copy of APU.
+dnl
+dnl APR_FIND_APU(srcdir, builddir, implicit-install-check, acceptable-majors)
+dnl
+dnl where srcdir is the location of the bundled APU source directory, or
+dnl empty if source is not bundled.
+dnl
+dnl where builddir is the location where the bundled APU will be built,
+dnl or empty if the build will occur in the srcdir.
+dnl
+dnl where implicit-install-check set to 1 indicates if there is no
+dnl --with-apr-util option specified, we will look for installed copies.
+dnl
+dnl where acceptable-majors is a space separated list of acceptable major
+dnl version numbers. Often only a single major version will be acceptable.
+dnl If multiple versions are specified, and --with-apr-util=PREFIX or the
+dnl implicit installed search are used, then the first (leftmost) version
+dnl in the list that is found will be used. Currently defaults to [0 1].
+dnl
+dnl Sets the following variables on exit:
+dnl
+dnl apu_found : "yes", "no", "reconfig"
+dnl
+dnl apu_config : If the apu-config tool exists, this refers to it. If
+dnl apu_found is "reconfig", then the bundled directory
+dnl should be reconfigured *before* using apu_config.
+dnl
+dnl Note: this macro file assumes that apr-config has been installed; it
+dnl is normally considered a required part of an APR installation.
+dnl
+dnl Note: At this time, we cannot find *both* a source dir and a build dir.
+dnl If both are available, the build directory should be passed to
+dnl the --with-apr-util switch.
+dnl
+dnl Note: the installation layout is presumed to follow the standard
+dnl PREFIX/lib and PREFIX/include pattern. If the APU config file
+dnl is available (and can be found), then non-standard layouts are
+dnl possible, since it will be described in the config file.
+dnl
+dnl If a bundled source directory is available and needs to be (re)configured,
+dnl then apu_found is set to "reconfig". The caller should reconfigure the
+dnl (passed-in) source directory, placing the result in the build directory,
+dnl as appropriate.
+dnl
+dnl If apu_found is "yes" or "reconfig", then the caller should use the
+dnl value of apu_config to fetch any necessary build/link information.
+dnl
+
+AC_DEFUN([APR_FIND_APU], [
+ apu_found="no"
+
+ if test "$ac_cv_emxos2" = "yes"; then
+ # Scripts don't pass test -x on OS/2
+ TEST_X="test -f"
+ else
+ TEST_X="test -x"
+ fi
+
+ ifelse([$4], [],
+ [
+ ifdef(AC_WARNING,([$0: missing argument 4 (acceptable-majors): Defaulting to APU 0.x then APU 1.x]))
+ acceptable_majors="0 1"
+ ], [acceptable_majors="$4"])
+
+ apu_temp_acceptable_apu_config=""
+ for apu_temp_major in $acceptable_majors
+ do
+ case $apu_temp_major in
+ 0)
+ apu_temp_acceptable_apu_config="$apu_temp_acceptable_apu_config apu-config"
+ ;;
+ *)
+ apu_temp_acceptable_apu_config="$apu_temp_acceptable_apu_config apu-$apu_temp_major-config"
+ ;;
+ esac
+ done
+
+ AC_MSG_CHECKING(for APR-util)
+ AC_ARG_WITH(apr-util,
+ [ --with-apr-util=PATH prefix for installed APU, path to APU build tree,
+ or the full path to apu-config],
+ [
+ if test "$withval" = "no" || test "$withval" = "yes"; then
+ AC_MSG_ERROR([--with-apr-util requires a directory or file to be provided])
+ fi
+
+ for apu_temp_apu_config_file in $apu_temp_acceptable_apu_config
+ do
+ for lookdir in "$withval/bin" "$withval"
+ do
+ if $TEST_X "$lookdir/$apu_temp_apu_config_file"; then
+ apu_found="yes"
+ apu_config="$lookdir/$apu_temp_apu_config_file"
+ break 2
+ fi
+ done
+ done
+
+ if test "$apu_found" != "yes" && $TEST_X "$withval" && $withval --help > /dev/null 2>&1 ; then
+ apu_found="yes"
+ apu_config="$withval"
+ fi
+
+ dnl if --with-apr-util is used, it is a fatal error for its argument
+ dnl to be invalid
+ if test "$apu_found" != "yes"; then
+ AC_MSG_ERROR([the --with-apr-util parameter is incorrect. It must specify an install prefix, a build directory, or an apu-config file.])
+ fi
+ ],[
+ dnl if we have a bundled source directory, use it
+ if test -d "$1"; then
+ apu_temp_abs_srcdir="`cd $1 && pwd`"
+ apu_found="reconfig"
+ apu_bundled_major="`sed -n '/#define.*APU_MAJOR_VERSION/s/^[^0-9]*\([0-9]*\).*$/\1/p' \"$1/include/apu_version.h\"`"
+ case $apu_bundled_major in
+ "")
+ AC_MSG_ERROR([failed to find major version of bundled APU])
+ ;;
+ 0)
+ apu_temp_apu_config_file="apu-config"
+ ;;
+ *)
+ apu_temp_apu_config_file="apu-$apu_bundled_major-config"
+ ;;
+ esac
+ if test -n "$2"; then
+ apu_config="$2/$apu_temp_apu_config_file"
+ else
+ apu_config="$1/$apu_temp_apu_config_file"
+ fi
+ fi
+ if test "$apu_found" = "no" && test -n "$3" && test "$3" = "1"; then
+ for apu_temp_apu_config_file in $apu_temp_acceptable_apu_config
+ do
+ if $apu_temp_apu_config_file --help > /dev/null 2>&1 ; then
+ apu_found="yes"
+ apu_config="$apu_temp_apu_config_file"
+ break
+ else
+ dnl look in some standard places (apparently not in builtin/default)
+ for lookdir in /usr /usr/local /opt/apr /usr/local/apache2 ; do
+ if $TEST_X "$lookdir/bin/$apu_temp_apu_config_file"; then
+ apu_found="yes"
+ apu_config="$lookdir/bin/$apu_temp_apu_config_file"
+ break 2
+ fi
+ done
+ fi
+ done
+ fi
+ ])
+
+ AC_MSG_RESULT($apu_found)
+])
diff --git a/build/ac-macros/gssapi.m4 b/build/ac-macros/gssapi.m4
new file mode 100644
index 0000000..8a8dc31
--- /dev/null
+++ b/build/ac-macros/gssapi.m4
@@ -0,0 +1,82 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_GSSAPI
+dnl
+dnl Check configure options and assign variables related to
+dnl the gssapi library.
+dnl
+
+AC_DEFUN(SVN_LIB_RA_SERF_GSSAPI,
+[
+ AC_ARG_WITH(gssapi,
+ [AS_HELP_STRING([[--with-gssapi[=PREFIX]]],
+ [GSSAPI (Kerberos) support])],
+ [svn_lib_gssapi="$withval"],
+ [svn_lib_gssapi="no"])
+
+ AC_MSG_CHECKING([whether to look for GSSAPI (Kerberos)])
+ if test "$svn_lib_gssapi" != "no"; then
+ AC_MSG_RESULT([yes])
+ if test "$svn_lib_gssapi" != "yes"; then
+ AC_MSG_CHECKING([for krb5-config])
+ KRB5_CONFIG="$svn_lib_gssapi/bin/krb5-config"
+ if test -f "$KRB5_CONFIG" && test -x "$KRB5_CONFIG"; then
+ AC_MSG_RESULT([yes])
+ else
+ KRB5_CONFIG=""
+ AC_MSG_RESULT([no])
+ fi
+ else
+ AC_PATH_PROG(KRB5_CONFIG, krb5-config)
+ fi
+ if test -n "$KRB5_CONFIG"; then
+ AC_MSG_CHECKING([for GSSAPI (Kerberos)])
+ old_CPPFLAGS="$CPPFLAGS"
+ old_CFLAGS="$CFLAGS"
+ old_LIBS="$LIBS"
+ CFLAGS=""
+ SVN_GSSAPI_INCLUDES="`$KRB5_CONFIG --cflags`"
+ SVN_GSSAPI_LIBS="`$KRB5_CONFIG --libs gssapi`"
+ SVN_GSSAPI_LIBS="`SVN_REMOVE_STANDARD_LIB_DIRS($SVN_GSSAPI_LIBS)`"
+ CPPFLAGS="$CPPFLAGS $SVN_GSSAPI_INCLUDES"
+ CFLAGS="$old_CFLAGS"
+ LIBS="$LIBS $SVN_GSSAPI_LIBS"
+ AC_LINK_IFELSE([AC_LANG_SOURCE([[
+#include <gssapi.h>
+int main()
+{gss_init_sec_context(NULL, NULL, NULL, NULL, NULL, 0, 0, NULL, NULL, NULL, NULL, NULL, NULL);}]])],
+ svn_lib_gssapi="yes", svn_lib_gssapi="no")
+ if test "$svn_lib_gssapi" = "yes"; then
+ AC_MSG_RESULT([yes])
+ CPPFLAGS="$old_CPPFLAGS"
+ LIBS="$old_LIBS"
+ else
+ AC_MSG_RESULT([no])
+ AC_MSG_ERROR([cannot find GSSAPI (Kerberos)])
+ fi
+ else
+ AC_MSG_ERROR([cannot find krb5-config])
+ fi
+ else
+ AC_MSG_RESULT([no])
+ fi
+ AC_SUBST(SVN_GSSAPI_INCLUDES)
+ AC_SUBST(SVN_GSSAPI_LIBS)
+])
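Usage note: GSSAPI support is off unless requested. With a bare --with-gssapi the macro looks for krb5-config on $PATH; with a prefix (hypothetical below) it expects PREFIX/bin/krb5-config, taking flags from `krb5-config --cflags` and `krb5-config --libs gssapi`.

    ./configure --with-gssapi
    ./configure --with-gssapi=/usr/local/krb5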
diff --git a/build/ac-macros/java.m4 b/build/ac-macros/java.m4
new file mode 100644
index 0000000..9ef4015
--- /dev/null
+++ b/build/ac-macros/java.m4
@@ -0,0 +1,215 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl java.m4: Locates the JDK and its include files and libraries.
+dnl
+
+AC_DEFUN(SVN_CHECK_JDK,
+[
+ JAVA_OLDEST_WORKING_VER="$1"
+ AC_ARG_WITH(jdk,
+ AS_HELP_STRING([--with-jdk=PATH],
+ [Try to use 'PATH/include' to find the JNI
+ headers. If PATH is not specified, look
+ for a Java Development Kit at JAVA_HOME.]),
+ [
+ case "$withval" in
+ "no")
+ JDK_SUITABLE=no
+ ;;
+ "yes")
+ SVN_FIND_JDK(check, $JAVA_OLDEST_WORKING_VER)
+ ;;
+ *)
+ SVN_FIND_JDK($withval, $JAVA_OLDEST_WORKING_VER)
+ ;;
+ esac
+ ],
+ [
+ SVN_FIND_JDK(check, $JAVA_OLDEST_WORKING_VER)
+ ])
+])
+
+AC_DEFUN(SVN_FIND_JDK,
+[
+ where=$1
+ JAVA_OLDEST_WORKING_VER="$2"
+
+ JDK=none
+ JAVA_BIN=none
+ JAVADOC=none
+ JAVAC=none
+ JAVAH=none
+ JAR=none
+ JNI_INCLUDES=none
+
+ JDK_SUITABLE=no
+ AC_MSG_CHECKING([for JDK])
+ if test $where = check; then
+ dnl Prefer /Library/Java/Home first to try to be nice on Darwin.
+ dnl We'll correct later if we get caught in the tangled web of JAVA_HOME.
+ if test -x "$JAVA_HOME/bin/java"; then
+ JDK="$JAVA_HOME"
+ elif test -x "/Library/Java/Home/bin/java"; then
+ JDK="/Library/Java/Home"
+ elif test -x "/usr/bin/java"; then
+ JDK="/usr"
+ elif test -x "/usr/local/bin/java"; then
+ JDK="/usr/local"
+ fi
+ else
+ JDK=$where
+ fi
+
+ dnl Correct for Darwin's odd JVM layout. Ideally, we should use realpath,
+ dnl but Darwin doesn't have that utility. /usr/bin/java is a symlink into
+ dnl /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Commands
+ dnl See http://developer.apple.com/qa/qa2001/qa1170.html
+ os_arch="`uname`"
+ if test "$os_arch" = "Darwin"; then
+ OSX_VER=`/usr/bin/sw_vers | grep ProductVersion | cut -f2 | cut -d"." -f1,2`
+
+ if test "$OSX_VER" = "10.4"; then
+ dnl For OS X 10.4, the SDK version is 10.4u instead of 10.4.
+ OSX_VER="10.4u"
+ fi
+
+ OSX_SYS_JAVA_FRAMEWORK="/System/Library/Frameworks/JavaVM.framework"
+ OSX_SDK_JAVA_FRAMEWORK="/Developer/SDKs/MacOSX$OSX_VER.sdk/System/Library"
+ OSX_SDK_JAVA_FRAMEWORK="$OSX_SDK_JAVA_FRAMEWORK/Frameworks/JavaVM.framework"
+ fi
+
+ if test "$os_arch" = "Darwin" && test "$JDK" = "/usr" &&
+ test -d "/Library/Java/Home"; then
+ JDK="/Library/Java/Home"
+ fi
+
+ if test "$os_arch" = "Darwin" && test "$JDK" = "/Library/Java/Home"; then
+ JRE_LIB_DIR="$OSX_SYS_JAVA_FRAMEWORK/Classes"
+ else
+ JRE_LIB_DIR="$JDK/jre/lib"
+ fi
+
+ if test -f "$JDK/include/jni.h"; then
+ dnl This *must* be fully expanded, or we'll have problems later in find.
+ JNI_INCLUDEDIR="$JDK/include"
+ JDK_SUITABLE=yes
+ elif test "$os_arch" = "Darwin" && test -e "$JDK/Headers/jni.h"; then
+ dnl Search the Headers directory in the JDK
+ JNI_INCLUDEDIR="$JDK/Headers"
+ JDK_SUITABLE=yes
+ elif test "$os_arch" = "Darwin" &&
+ test -e "$OSX_SYS_JAVA_FRAMEWORK/Headers/jni.h"; then
+ dnl Search the System framework's Headers directory
+ JNI_INCLUDEDIR="$OSX_SYS_JAVA_FRAMEWORK/Headers"
+ JDK_SUITABLE=yes
+ elif test "$os_arch" = "Darwin" &&
+ test -e "$OSX_SDK_JAVA_FRAMEWORK/Headers/jni.h"; then
+ dnl Search the SDK's System framework's Headers directory
+ JNI_INCLUDEDIR="$OSX_SDK_JAVA_FRAMEWORK/Headers"
+ JDK_SUITABLE=yes
+ else
+ JDK_SUITABLE=no
+ fi
+ if test "$JDK_SUITABLE" = "yes"; then
+ AC_MSG_RESULT([$JNI_INCLUDEDIR/jni.h])
+ else
+ AC_MSG_RESULT([no])
+ if test "$where" != "check"; then
+ AC_MSG_WARN([no JNI header files found.])
+ if test "$os_arch" = "Darwin"; then
+ AC_MSG_WARN([You may need to install the latest Java Development package from http://connect.apple.com/. Apple no longer includes the JNI header files by default on Java updates.])
+ fi
+ fi
+ fi
+
+ if test "$JDK_SUITABLE" = "yes"; then
+ JAVA_BIN='$(JDK)/bin'
+
+ JAVA="$JAVA_BIN/java"
+ JAVAC="$JAVA_BIN/javac"
+ JAVAH="$JAVA_BIN/javah"
+ JAVADOC="$JAVA_BIN/javadoc"
+ JAR="$JAVA_BIN/jar"
+
+ dnl Prefer Jikes (for speed) if available.
+ jikes_options="/usr/local/bin/jikes /usr/bin/jikes"
+ AC_ARG_WITH(jikes,
+ AS_HELP_STRING([--with-jikes=PATH],
+ [Specify the path to a jikes binary to use
+ it as your Java compiler. The default is to
+ look for jikes (PATH optional). This behavior
+ can be switched off by supplying 'no'.]),
+ [
+ if test "$withval" != "no" && test "$withval" != "yes"; then
+ dnl Assume a path was provided.
+ jikes_options="$withval $jikes_options"
+ fi
+ requested_jikes="$withval" # will be 'yes' if path unspecified
+ ])
+ if test "$requested_jikes" != "no"; then
+ dnl Look for a usable jikes binary.
+ for jikes in $jikes_options; do
+ if test -z "$jikes_found" && test -x "$jikes"; then
+ jikes_found="yes"
+ JAVAC="$jikes"
+ JAVA_CLASSPATH="$JRE_LIB_DIR"
+ for jar in $JRE_LIB_DIR/*.jar; do
+ JAVA_CLASSPATH="$JAVA_CLASSPATH:$jar"
+ done
+ fi
+ done
+ fi
+ if test -n "$requested_jikes" && test "$requested_jikes" != "no"; then
+ dnl Jikes was explicitly requested. Verify that it was provided.
+ if test -z "$jikes_found"; then
+ AC_MSG_ERROR([Could not find a usable version of Jikes])
+ elif test -n "$jikes_found" && test "$requested_jikes" != "yes" &&
+ test "$JAVAC" != "$requested_jikes"; then
+ AC_MSG_WARN([--with-jikes PATH was invalid, substitute found])
+ fi
+ fi
+
+ dnl Add javac flags.
+ # The release for "-source" could actually be greater than that
+ # of "-target", if we want to cross-compile for lesser JVMs.
+ if test -z "$JAVAC_FLAGS"; then
+ JAVAC_FLAGS="-target $JAVA_OLDEST_WORKING_VER -source 1.5"
+ if test "$enable_debugging" = "yes"; then
+ JAVAC_FLAGS="-g -Xlint:unchecked $JAVAC_FLAGS"
+ fi
+ fi
+
+ JNI_INCLUDES="-I$JNI_INCLUDEDIR"
+ list="`find "$JNI_INCLUDEDIR" -type d -print`"
+ for dir in $list; do
+ JNI_INCLUDES="$JNI_INCLUDES -I$dir"
+ done
+ fi
+
+ dnl We use JDK in the Makefile
+ AC_SUBST(JDK)
+ AC_SUBST(JAVA)
+ AC_SUBST(JAVAC)
+ AC_SUBST(JAVAC_FLAGS)
+ AC_SUBST(JAVADOC)
+ AC_SUBST(JAVAH)
+ AC_SUBST(JAR)
+ AC_SUBST(JNI_INCLUDES)
+])
diff --git a/build/ac-macros/kwallet.m4 b/build/ac-macros/kwallet.m4
new file mode 100644
index 0000000..61a2d10
--- /dev/null
+++ b/build/ac-macros/kwallet.m4
@@ -0,0 +1,117 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_KWALLET
+dnl
+dnl Check configure options and assign variables related to KWallet support
+dnl
+
+AC_DEFUN(SVN_LIB_KWALLET,
+[
+ AC_ARG_WITH(kwallet,
+ [AS_HELP_STRING([[--with-kwallet[=PATH]]],
+ [Enable use of KWallet (KDE 4) for auth credentials])],
+ [svn_lib_kwallet="$withval"],
+ [svn_lib_kwallet=no])
+
+ AC_MSG_CHECKING([whether to look for KWallet])
+ if test "$svn_lib_kwallet" != "no"; then
+ AC_MSG_RESULT([yes])
+ if test "$svn_enable_shared" = "yes"; then
+ if test "$APR_HAS_DSO" = "yes"; then
+ if test "$USE_NLS" = "yes"; then
+ if test -n "$PKG_CONFIG"; then
+ if test "$HAVE_DBUS" = "yes"; then
+ AC_MSG_CHECKING([for QtCore, QtDBus, QtGui])
+ if $PKG_CONFIG --exists QtCore QtDBus QtGui; then
+ AC_MSG_RESULT([yes])
+ if test "$svn_lib_kwallet" != "yes"; then
+ AC_MSG_CHECKING([for kde4-config])
+ KDE4_CONFIG="$svn_lib_kwallet/bin/kde4-config"
+ if test -f "$KDE4_CONFIG" && test -x "$KDE4_CONFIG"; then
+ AC_MSG_RESULT([yes])
+ else
+ KDE4_CONFIG=""
+ AC_MSG_RESULT([no])
+ fi
+ else
+ AC_PATH_PROG(KDE4_CONFIG, kde4-config)
+ fi
+ if test -n "$KDE4_CONFIG"; then
+ AC_MSG_CHECKING([for KWallet])
+ old_CXXFLAGS="$CXXFLAGS"
+ old_LDFLAGS="$LDFLAGS"
+ old_LIBS="$LIBS"
+ for d in [`$PKG_CONFIG --cflags QtCore QtDBus QtGui`]; do
+ if test -n ["`echo "$d" | $EGREP -- '^-D[^[:space:]]*'`"]; then
+ CPPFLAGS="$CPPFLAGS $d"
+ fi
+ done
+ qt_include_dirs="`$PKG_CONFIG --cflags-only-I QtCore QtDBus QtGui`"
+ kde_dir="`$KDE4_CONFIG --prefix`"
+ SVN_KWALLET_INCLUDES="$DBUS_CPPFLAGS $qt_include_dirs -I$kde_dir/include"
+ qt_libs_other_options="`$PKG_CONFIG --libs-only-other QtCore QtDBus QtGui`"
+ SVN_KWALLET_LIBS="$DBUS_LIBS -lQtCore -lQtDBus -lQtGui -lkdecore -lkdeui $qt_libs_other_options"
+ CXXFLAGS="$CXXFLAGS $SVN_KWALLET_INCLUDES"
+ LIBS="$LIBS $SVN_KWALLET_LIBS"
+ qt_lib_dirs="`$PKG_CONFIG --libs-only-L QtCore QtDBus QtGui`"
+ kde_lib_suffix="`$KDE4_CONFIG --libsuffix`"
+ LDFLAGS="$old_LDFLAGS `SVN_REMOVE_STANDARD_LIB_DIRS($qt_lib_dirs -L$kde_dir/lib$kde_lib_suffix)`"
+ AC_LANG(C++)
+ AC_LINK_IFELSE([AC_LANG_SOURCE([[
+#include <kwallet.h>
+int main()
+{KWallet::Wallet::walletList();}]])], svn_lib_kwallet="yes", svn_lib_kwallet="no")
+ AC_LANG(C)
+ if test "$svn_lib_kwallet" = "yes"; then
+ AC_MSG_RESULT([yes])
+ CXXFLAGS="$old_CXXFLAGS"
+ LIBS="$old_LIBS"
+ else
+ AC_MSG_RESULT([no])
+ AC_MSG_ERROR([cannot find KWallet])
+ fi
+ else
+ AC_MSG_ERROR([cannot find kde4-config])
+ fi
+ else
+ AC_MSG_RESULT([no])
+ AC_MSG_ERROR([cannot find QtCore, QtDBus, QtGui])
+ fi
+ else
+ AC_MSG_ERROR([cannot find D-Bus])
+ fi
+ else
+ AC_MSG_ERROR([cannot find pkg-config])
+ fi
+ else
+ AC_MSG_ERROR([missing support for internationalization])
+ fi
+ else
+ AC_MSG_ERROR([APR does not have support for DSOs])
+ fi
+ else
+ AC_MSG_ERROR([--with-kwallet conflicts with --disable-shared])
+ fi
+ else
+ AC_MSG_RESULT([no])
+ fi
+ AC_SUBST(SVN_KWALLET_INCLUDES)
+ AC_SUBST(SVN_KWALLET_LIBS)
+])
diff --git a/build/ac-macros/neon.m4 b/build/ac-macros/neon.m4
new file mode 100644
index 0000000..f951039
--- /dev/null
+++ b/build/ac-macros/neon.m4
@@ -0,0 +1,168 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_NEON(allowed_list_of_regex, recommended_ver, url)
+dnl
+dnl Search for a suitable version of neon. allowed_list_of_regex is a
+dnl list of regular expressions used in a grep invocation
+dnl to match versions of Neon that can be used. recommended_ver is the
+dnl recommended version of Neon, which is not necessarily the latest
+dnl released version of neon that exists. url is the URL of the
+dnl recommended version of Neon.
+dnl
+dnl If a --with-neon option is passed (no argument), then a search for
+dnl neon on the system is performed.
+dnl
+dnl If a --with-neon=PREFIX option is passed search for a suitable
+dnl neon installed on the system whose configuration can be found in
+dnl PREFIX/bin/neon-config.
+dnl
+dnl If the search for neon fails, set svn_lib_neon to no, otherwise set
+dnl it to yes.
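+dnl
+dnl Illustrative invocation (the values shown are examples only, not the ones
+dnl configure.ac actually passes):
+dnl   SVN_LIB_NEON(["0\.25" "0\.26" any], [0.26.4], [http://www.webdav.org/neon/])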
+
+AC_DEFUN(SVN_LIB_NEON,
+[
+ NEON_ALLOWED_LIST="$1"
+ NEON_RECOMMENDED_VER="$2"
+ NEON_URL="$3"
+
+ AC_MSG_NOTICE([checking neon library])
+
+ AC_ARG_WITH(neon,
+ AS_HELP_STRING([--with-neon=PREFIX],
+ [Determine neon library configuration based on
+ 'PREFIX/bin/neon-config'. Default is to search for
+ neon-config in $PATH.]),
+ [
+ if test "$withval" = "yes" ; then
+ if test -n "$PKG_CONFIG" && $PKG_CONFIG neon --exists ; then
+ NEON_PKG_CONFIG="yes"
+ else
+ AC_PATH_PROG(neon_config, neon-config)
+ fi
+ else
+ neon_config="$withval/bin/neon-config"
+ fi
+
+ SVN_NEON_CONFIG()
+ ],
+ [
+ AC_PATH_PROG(neon_config, neon-config)
+ SVN_NEON_CONFIG()
+ ])
+
+ AC_SUBST(SVN_NEON_INCLUDES)
+ AC_SUBST(NEON_LIBS)
+])
+
+dnl SVN_NEON_CONFIG()
+dnl neon-config found, gather relevant information from it
+AC_DEFUN(SVN_NEON_CONFIG,
+[
+ if test "$NEON_PKG_CONFIG" = "yes" || test -f "$neon_config"; then
+ if test "$NEON_PKG_CONFIG" = "yes" || test "$neon_config" != ""; then
+ AC_MSG_CHECKING([neon library version])
+ if test "$NEON_PKG_CONFIG" = "yes" ; then
+ NEON_VERSION=`$PKG_CONFIG neon --modversion`
+ else
+ NEON_VERSION=`$neon_config --version | $SED -e 's/^neon //'`
+ fi
+ AC_MSG_RESULT([$NEON_VERSION])
+
+ if test -n ["`echo "$NEON_VERSION" | $EGREP '^0\.(2[6-9]|3[0-9])\.'`"] ; then
+ AC_DEFINE_UNQUOTED([SVN_NEON_0_26], [1],
+ [Define to 1 if you have Neon 0.26 or later.])
+ fi
+
+ if test -n ["`echo "$NEON_VERSION" | $EGREP '^0\.(2[7-9]|3[0-9])\.'`"] ; then
+ AC_DEFINE_UNQUOTED([SVN_NEON_0_27], [1],
+ [Define to 1 if you have Neon 0.27 or later.])
+ fi
+
+ if test -n ["`echo "$NEON_VERSION" | $EGREP '^0\.(2[8-9]|3[0-9])\.'`"] ; then
+ AC_DEFINE_UNQUOTED([SVN_NEON_0_28], [1],
+ [Define to 1 if you have Neon 0.28 or later.])
+ fi
+
+ for svn_allowed_neon in $NEON_ALLOWED_LIST; do
+ if test -n "`echo "$NEON_VERSION" | grep "^$svn_allowed_neon"`" ||
+ test "$svn_allowed_neon" = "any"; then
+ svn_allowed_neon_on_system="yes"
+ if test "$NEON_PKG_CONFIG" = "yes"; then
+ SVN_NEON_INCLUDES=[`$PKG_CONFIG neon --cflags | $SED -e 's/-D[^ ]*//g'`]
+ CFLAGS=["$CFLAGS `$PKG_CONFIG neon --cflags | $SED -e 's/-I[^ ]*//g'`"]
+ old_CFLAGS="$CFLAGS"
+ old_LIBS="$LIBS"
+ NEON_LIBS=`$PKG_CONFIG neon --libs`
+ CFLAGS="$CFLAGS $SVN_NEON_INCLUDES"
+ LIBS="$LIBS $NEON_LIBS"
+ neon_test_code="
+#include <ne_compress.h>
+#include <ne_xml.h>
+int main()
+{ne_xml_create(); ne_decompress_destroy(NULL);}"
+ AC_LINK_IFELSE([AC_LANG_SOURCE([[$neon_test_code]])], shared_linking="yes", shared_linking="no")
+ if test "$shared_linking" = "no"; then
+ NEON_LIBS=`$PKG_CONFIG neon --libs --static`
+ LIBS="$LIBS $NEON_LIBS"
+ AC_LINK_IFELSE([AC_LANG_SOURCE([[$neon_test_code]])], , AC_MSG_ERROR([cannot find Neon]))
+ fi
+ CFLAGS="$old_CFLAGS"
+ LIBS="$old_LIBS"
+ else
+ SVN_NEON_INCLUDES=[`$neon_config --cflags | $SED -e 's/-D[^ ]*//g'`]
+ CFLAGS=["$CFLAGS `$neon_config --cflags | $SED -e 's/-I[^ ]*//g'`"]
+ NEON_LIBS=`$neon_config --libs`
+ fi
+ svn_lib_neon="yes"
+ break
+ fi
+ done
+
+ if test -z $svn_allowed_neon_on_system; then
+ echo "You have neon version $NEON_VERSION,"
+ echo "but Subversion needs neon $NEON_RECOMMENDED_VER."
+ SVN_DOWNLOAD_NEON()
+ fi
+
+ else
+ # no neon subdir, no neon-config in PATH
+ AC_MSG_RESULT([nothing])
+ echo "No suitable neon can be found."
+ SVN_DOWNLOAD_NEON()
+ fi
+
+ elif test "$with_neon" != "no"; then
+ # user passed --with-neon=/incorrect/path
+ SVN_DOWNLOAD_NEON()
+ fi
+])
+
+dnl SVN_DOWNLOAD_NEON()
+dnl no neon found, print out a message telling the user what to do
+AC_DEFUN(SVN_DOWNLOAD_NEON,
+[
+ echo ""
+ echo "An appropriate version of neon could not be found, so libsvn_ra_neon"
+ echo "will not be built. If you want to build libsvn_ra_neon, please"
+ echo "install neon ${NEON_RECOMMENDED_VER} on this system."
+ echo ""
+ AC_MSG_RESULT([no suitable neon found])
+ svn_lib_neon="no"
+])
diff --git a/build/ac-macros/sasl.m4 b/build/ac-macros/sasl.m4
new file mode 100644
index 0000000..563f9cb
--- /dev/null
+++ b/build/ac-macros/sasl.m4
@@ -0,0 +1,102 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_SASL
+dnl
+dnl Check configure options and assign variables related to
+dnl the sasl library.
+dnl
+dnl If we find the library, set the shell variable
+dnl `svn_lib_sasl' to `yes'. Otherwise, set `svn_lib_sasl'
+dnl to `no'.
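+dnl
+dnl Typical usage from the command line (the path shown is an example only):
+dnl   ./configure --with-sasl=/usr/local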
+
+AC_DEFUN(SVN_LIB_SASL,
+[
+ AC_ARG_WITH(sasl, [AS_HELP_STRING([--with-sasl=PATH],
+ [Compile with libsasl2 in PATH])],
+ [
+ with_sasl="$withval"
+ required="yes"
+ ],
+ [
+ with_sasl="yes"
+ required="no"
+ ])
+
+ AC_MSG_CHECKING([whether to look for SASL])
+
+ if test "${with_sasl}" = "no"; then
+ AC_MSG_RESULT([no])
+ svn_lib_sasl=no
+ else
+ AC_MSG_RESULT([yes])
+ saved_LDFLAGS="$LDFLAGS"
+ saved_CPPFLAGS="$CPPFLAGS"
+
+ if test "$with_sasl" = "yes"; then
+ AC_MSG_NOTICE([Looking in default locations])
+ AC_CHECK_HEADER(sasl/sasl.h,
+ [AC_CHECK_HEADER(sasl/saslutil.h,
+ [AC_CHECK_LIB(sasl2, prop_get,
+ svn_lib_sasl=yes,
+ svn_lib_sasl=no)],
+ svn_lib_sasl=no)], svn_lib_sasl=no)
+ if test "$svn_lib_sasl" = "no"; then
+ with_sasl="/usr/local"
+ fi
+ else
+ svn_lib_sasl=no
+ fi
+
+ if test "$svn_lib_sasl" = "no"; then
+ SVN_SASL_INCLUDES="-I${with_sasl}/include"
+ CPPFLAGS="$CPPFLAGS $SVN_SASL_INCLUDES"
+ LDFLAGS="$LDFLAGS `SVN_REMOVE_STANDARD_LIB_DIRS(-L${with_sasl}/lib)`"
+
+ AC_CHECK_HEADER(sasl/sasl.h,
+ [AC_CHECK_HEADER(sasl/saslutil.h,
+ [AC_CHECK_LIB(sasl2, prop_get,
+ svn_lib_sasl=yes,
+ svn_lib_sasl=no)],
+ svn_lib_sasl=no)], svn_lib_sasl=no)
+ fi
+
+ AC_MSG_CHECKING([for availability of Cyrus SASL v2])
+ if test "$svn_lib_sasl" = "yes"; then
+ SVN_SASL_LIBS="-lsasl2"
+ AC_MSG_RESULT([yes])
+ else
+ AC_MSG_RESULT([no])
+
+ if test "$required" = "yes"; then
+ dnl The user explicitly requested SASL, but we couldn't find it.
+ dnl Exit with an error message.
+ AC_MSG_ERROR([Could not find Cyrus SASL v2])
+ fi
+
+ SVN_SASL_INCLUDES=""
+ LDFLAGS="$saved_LDFLAGS"
+ fi
+
+ CPPFLAGS="$saved_CPPFLAGS"
+ fi
+
+ AC_SUBST(SVN_SASL_INCLUDES)
+ AC_SUBST(SVN_SASL_LIBS)
+])
diff --git a/build/ac-macros/serf.m4 b/build/ac-macros/serf.m4
new file mode 100644
index 0000000..a6f7cf1
--- /dev/null
+++ b/build/ac-macros/serf.m4
@@ -0,0 +1,99 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_SERF(min_major_num, min_minor_num, min_micro_num)
+dnl
+dnl Check configure options and assign variables related to
+dnl the serf library.
+dnl
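+dnl Illustrative invocation (version numbers are examples only; this would
+dnl require at least serf 0.3.0):
+dnl   SVN_LIB_SERF([0], [3], [0])
+dnl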
+
+AC_DEFUN(SVN_LIB_SERF,
+[
+ serf_found=no
+
+ serf_check_major="$1"
+ serf_check_minor="$2"
+ serf_check_patch="$3"
+
+ AC_ARG_WITH(serf,AS_HELP_STRING([--with-serf=PREFIX],
+ [Serf WebDAV client library]),
+ [
+ if test "$withval" = "yes" ; then
+ AC_MSG_ERROR([--with-serf requires an argument.])
+ elif test "$withval" != "no" ; then
+ AC_MSG_NOTICE([serf library configuration])
+ serf_prefix=$withval
+ for serf_major in serf-1 serf-0; do
+ if ! test -d $serf_prefix/include/$serf_major; then continue; fi
+ save_cppflags="$CPPFLAGS"
+ CPPFLAGS="$CPPFLAGS $SVN_APR_INCLUDES $SVN_APRUTIL_INCLUDES -I$serf_prefix/include/$serf_major"
+ AC_CHECK_HEADERS(serf.h,[
+ save_ldflags="$LDFLAGS"
+ LDFLAGS="$LDFLAGS -L$serf_prefix/lib"
+ AC_CHECK_LIB($serf_major, serf_context_create,[
+ AC_TRY_COMPILE([
+#include <stdlib.h>
+#include "serf.h"
+],[
+#if ! SERF_VERSION_AT_LEAST($serf_check_major, $serf_check_minor, $serf_check_patch)
+#error Serf version too old: need $serf_check_major.$serf_check_minor.$serf_check_patch
+#endif
+], [serf_found=yes], [AC_MSG_WARN([Serf version too old: need $serf_check_major.$serf_check_minor.$serf_check_patch])
+ serf_found=no])], ,
+ $SVN_APRUTIL_LIBS $SVN_APR_LIBS -lz)
+ LDFLAGS="$save_ldflags"])
+ CPPFLAGS="$save_cppflags"
+ test $serf_found = yes && break
+ done
+ fi
+ ], [
+ if test -d "$srcdir/serf"; then
+ serf_found=reconfig
+ fi
+ ])
+
+
+ if test $serf_found = "reconfig"; then
+ SVN_EXTERNAL_PROJECT([serf], [--with-apr=$apr_config --with-apr-util=$apu_config])
+ serf_major=serf-`$srcdir/serf/build/get-version.sh major $srcdir/serf/serf.h SERF`
+ serf_prefix=$prefix
+ SVN_SERF_PREFIX="$serf_prefix"
+ SVN_SERF_INCLUDES="-I$srcdir/serf"
+ SVN_SERF_LIBS="$abs_builddir/serf/lib$serf_major.la"
+ fi
+
+ if test $serf_found = "yes"; then
+ SVN_SERF_PREFIX="$serf_prefix"
+ SVN_SERF_INCLUDES="-I$serf_prefix/include/$serf_major"
+ if test -e "$serf_prefix/lib/lib$serf_major.la"; then
+ SVN_SERF_LIBS="$serf_prefix/lib/lib$serf_major.la"
+ else
+ SVN_SERF_LIBS="-l$serf_major"
+ LDFLAGS="$LDFLAGS -L$serf_prefix/lib"
+ fi
+ elif test $serf_found = "reconfig"; then
+ serf_found=yes
+ fi
+
+ svn_lib_serf=$serf_found
+
+ AC_SUBST(SVN_SERF_PREFIX)
+ AC_SUBST(SVN_SERF_INCLUDES)
+ AC_SUBST(SVN_SERF_LIBS)
+])
diff --git a/build/ac-macros/sqlite.m4 b/build/ac-macros/sqlite.m4
new file mode 100644
index 0000000..85a48da
--- /dev/null
+++ b/build/ac-macros/sqlite.m4
@@ -0,0 +1,249 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_SQLITE(minimum_ver, recommended_ver, url)
+dnl
+dnl Search for a suitable version of sqlite. minimum_ver is a
+dnl version string which is the lowest suitable version we can use.
+dnl recommended_ver is the recommended version of sqlite, which is
+dnl not necessarily the latest version released. url is the URL of
+dnl the recommended version of sqlite.
+dnl
+dnl If a --with-sqlite=PREFIX option is passed, look for a suitable sqlite
+dnl either installed under the directory PREFIX or as an amalgamation file
+dnl at the path PREFIX. In this case ignore any sqlite-amalgamation/ subdir
+dnl within the source tree.
+dnl
+dnl If no --with-sqlite option is passed, look first for
+dnl sqlite-amalgamation/sqlite3.c which should be the amalgamated version of
+dnl the source distribution. If the amalgamation exists and is the wrong
+dnl version, exit with a failure. If no sqlite-amalgamation/ subdir is
+dnl present, search for a sqlite installed on the system.
+dnl
+dnl If the search for sqlite fails, set svn_lib_sqlite to no, otherwise set
+dnl it to yes.
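+dnl
+dnl Illustrative invocation (version numbers and URL are examples only):
+dnl   SVN_LIB_SQLITE([3.6.18], [3.7.6], [http://www.sqlite.org/download.html])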
+
+AC_DEFUN(SVN_LIB_SQLITE,
+[
+ SQLITE_MINIMUM_VER="$1"
+ SQLITE_RECOMMENDED_VER="$2"
+ SQLITE_URL="$3"
+ SQLITE_PKGNAME="sqlite3"
+
+ SVN_SQLITE_MIN_VERNUM_PARSE
+
+ AC_MSG_NOTICE([checking sqlite library])
+
+ AC_ARG_WITH(sqlite,
+ AS_HELP_STRING([--with-sqlite=PREFIX],
+ [Use installed SQLite library or amalgamation file.]),
+ [
+ if test "$withval" = "yes" ; then
+ AC_MSG_ERROR([--with-sqlite requires an argument.])
+ else
+ sqlite_dir="$withval"
+ fi
+
+ if test -d $sqlite_dir; then
+ dnl pointed at an sqlite installation
+ SVN_SQLITE_DIR_CONFIG($sqlite_dir)
+ else
+ dnl pointed at the amalgamation file
+ SVN_SQLITE_FILE_CONFIG($sqlite_dir)
+ fi
+
+ if test -z "$svn_lib_sqlite"; then
+ AC_MSG_WARN([no suitable sqlite found in $sqlite_dir])
+ SVN_DOWNLOAD_SQLITE
+ fi
+ ],
+ [
+ dnl see if the sqlite amalgamation exists in the source tree
+ SVN_SQLITE_FILE_CONFIG($abs_srcdir/sqlite-amalgamation/sqlite3.c)
+
+ if test -z "$svn_lib_sqlite"; then
+ dnl check the "standard" location of /usr
+ SVN_SQLITE_DIR_CONFIG()
+ fi
+
+ if test -z "$svn_lib_sqlite"; then
+ dnl no --with-sqlite switch, and no sqlite subdir, look in PATH
+ SVN_SQLITE_PKG_CONFIG
+ fi
+
+ if test -z "$svn_lib_sqlite"; then
+ SVN_DOWNLOAD_SQLITE
+ fi
+ ])
+
+ AC_SUBST(SVN_SQLITE_INCLUDES)
+ AC_SUBST(SVN_SQLITE_LIBS)
+])
+
+dnl SVN_SQLITE_PKG_CONFIG
+dnl
+dnl Look for sqlite in PATH using pkg-config.
+AC_DEFUN(SVN_SQLITE_PKG_CONFIG,
+[
+ if test -n "$PKG_CONFIG"; then
+ AC_MSG_CHECKING([sqlite library version (via pkg-config)])
+ sqlite_version=`$PKG_CONFIG $SQLITE_PKGNAME --modversion --silence-errors`
+
+ if test -n "$sqlite_version"; then
+ SVN_SQLITE_VERNUM_PARSE
+
+ if test "$sqlite_ver_num" -ge "$sqlite_min_ver_num"; then
+ AC_MSG_RESULT([$sqlite_version])
+ svn_lib_sqlite="yes"
+ SVN_SQLITE_INCLUDES="`$PKG_CONFIG $SQLITE_PKGNAME --cflags`"
+ SVN_SQLITE_LIBS="`$PKG_CONFIG $SQLITE_PKGNAME --libs`"
+ else
+ AC_MSG_RESULT([none or unsupported $sqlite_version])
+ fi
+ fi
+ fi
+
+ if test -z "$svn_lib_sqlite"; then
+ AC_MSG_RESULT(no)
+ fi
+])
+
+dnl SVN_SQLITE_DIR_CONFIG(sqlite_dir)
+dnl
+dnl Check to see if we've got an appropriate sqlite library at sqlite_dir.
+dnl If we don't, fail.
+AC_DEFUN(SVN_SQLITE_DIR_CONFIG,
+[
+ if test -z "$1"; then
+ sqlite_dir=""
+ sqlite_include="sqlite3.h"
+ else
+ sqlite_dir="$1"
+ sqlite_include="$1/include/sqlite3.h"
+ fi
+
+ save_CPPFLAGS="$CPPFLAGS"
+ save_LDFLAGS="$LDFLAGS"
+
+ if test ! -z "$1"; then
+ CPPFLAGS="$CPPFLAGS -I$sqlite_dir/include"
+ LDFLAGS="$LDFLAGS -L$sqlite_dir/lib"
+ fi
+
+ AC_CHECK_HEADER(sqlite3.h,
+ [
+ AC_MSG_CHECKING([sqlite library version (via header)])
+ AC_EGREP_CPP(SQLITE_VERSION_OKAY,[
+#include "$sqlite_include"
+#if SQLITE_VERSION_NUMBER >= $sqlite_min_ver_num
+SQLITE_VERSION_OKAY
+#endif],
+ [AC_MSG_RESULT([okay])
+ AC_CHECK_LIB(sqlite3, sqlite3_close, [
+ svn_lib_sqlite="yes"
+ if test -z "$sqlite_dir" -o ! -d "$sqlite_dir"; then
+ SVN_SQLITE_LIBS="-lsqlite3"
+ else
+ SVN_SQLITE_INCLUDES="-I$sqlite_dir/include"
+ SVN_SQLITE_LIBS="`SVN_REMOVE_STANDARD_LIB_DIRS(-L$sqlite_dir/lib -lsqlite3)`"
+ fi
+ ])], [AC_MSG_RESULT([unsupported SQLite version])])
+ ])
+
+ CPPFLAGS="$save_CPPFLAGS"
+ LDFLAGS="$save_LDFLAGS"
+])
+
+dnl SVN_SQLITE_FILE_CONFIG(sqlite_file)
+dnl
+dnl Check to see if we've got an appropriate sqlite amalgamation file
+dnl at sqlite_file. If not, fail.
+AC_DEFUN(SVN_SQLITE_FILE_CONFIG,
+[
+ sqlite_amalg="$1"
+ AC_MSG_CHECKING([sqlite amalgamation])
+ if test ! -e $sqlite_amalg; then
+ AC_MSG_RESULT([no])
+ else
+ AC_MSG_RESULT([yes])
+ AC_MSG_CHECKING([sqlite amalgamation file version])
+ AC_EGREP_CPP(SQLITE_VERSION_OKAY,[
+#include "$sqlite_amalg"
+#if SQLITE_VERSION_NUMBER >= $sqlite_min_ver_num
+SQLITE_VERSION_OKAY
+#endif],
+ [AC_MSG_RESULT([amalgamation found and is okay])
+ AC_DEFINE(SVN_SQLITE_INLINE, 1,
+ [Defined if svn should use the amalgamated version of sqlite])
+ SVN_SQLITE_INCLUDES="-I`dirname $sqlite_amalg`"
+ SVN_SQLITE_LIBS="-ldl -lpthread"
+ svn_lib_sqlite="yes"],
+ [AC_MSG_RESULT([unsupported amalgamation SQLite version])])
+ fi
+])
+
+dnl SVN_SQLITE_VERNUM_PARSE()
+dnl
+dnl Parse a x.y[.z] version string sqlite_version into a number sqlite_ver_num.
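+dnl For example, sqlite_version=3.7.6 yields sqlite_ver_num=3007006, and
+dnl sqlite_version=3.7 (no micro part) yields 3007000.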
+AC_DEFUN(SVN_SQLITE_VERNUM_PARSE,
+[
+ sqlite_major=`expr $sqlite_version : '\([[0-9]]*\)'`
+ sqlite_minor=`expr $sqlite_version : '[[0-9]]*\.\([[0-9]]*\)'`
+ sqlite_micro=`expr $sqlite_version : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'`
+ if test -z "$sqlite_micro"; then
+ sqlite_micro=0
+ fi
+ sqlite_ver_num=`expr $sqlite_major \* 1000000 \
+ \+ $sqlite_minor \* 1000 \
+ \+ $sqlite_micro`
+])
+
+dnl SVN_SQLITE_MIN_VERNUM_PARSE()
+dnl
+dnl Parse a x.y.z version string SQLITE_MINIMUM_VER into a number
+dnl sqlite_min_ver_num.
+AC_DEFUN(SVN_SQLITE_MIN_VERNUM_PARSE,
+[
+ sqlite_min_major=`expr $SQLITE_MINIMUM_VER : '\([[0-9]]*\)'`
+ sqlite_min_minor=`expr $SQLITE_MINIMUM_VER : '[[0-9]]*\.\([[0-9]]*\)'`
+ sqlite_min_micro=`expr $SQLITE_MINIMUM_VER : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'`
+ sqlite_min_ver_num=`expr $sqlite_min_major \* 1000000 \
+ \+ $sqlite_min_minor \* 1000 \
+ \+ $sqlite_min_micro`
+])
+
+dnl SVN_DOWNLOAD_SQLITE()
+dnl no sqlite found, print out a message telling the user what to do
+AC_DEFUN(SVN_DOWNLOAD_SQLITE,
+[
+ echo ""
+ echo "An appropriate version of sqlite could not be found. We recommmend"
+ echo "${SQLITE_RECOMMENDED_VER}, but require at least ${SQLITE_MINIMUM_VER}."
+ echo "Please either install a newer sqlite on this system"
+ echo ""
+ echo "or"
+ echo ""
+ echo "get the sqlite ${SQLITE_RECOMMENDED_VER} amalgamation from:"
+ echo " ${SQLITE_URL}"
+ echo "unpack the archive using tar/gunzip and copy sqlite3.c from the"
+ echo "resulting directory to:"
+ echo "$abs_srcdir/sqlite-amalgamation/sqlite3.c"
+ echo ""
+ AC_MSG_ERROR([Subversion requires SQLite])
+])
diff --git a/build/ac-macros/svn-macros.m4 b/build/ac-macros/svn-macros.m4
new file mode 100644
index 0000000..29e9624
--- /dev/null
+++ b/build/ac-macros/svn-macros.m4
@@ -0,0 +1,204 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+# Miscellaneous additional macros for Subversion's own use.
+
+# SVN_CONFIG_NICE(FILENAME)
+# Write a shell script to FILENAME (typically 'config.nice') which reinvokes
+# configure with all of the arguments. Reserves use of the filename
+# FILENAME.old for its own use.
+# This is different from 'config.status --recheck' in that it does not add implicit
+# --no-create --no-recursion options, and stores _just_ the configure
+# invocation, instead of the entire configured state.
+AC_DEFUN([SVN_CONFIG_NICE], [
+ AC_MSG_NOTICE([creating $1])
+ # This little dance satisfies Cygwin, which cannot overwrite in-use files.
+ if test -f "$1"; then
+ mv "$1" "$1.old"
+ fi
+
+ cat >"$1" <<EOF
+#! /bin/sh
+#
+# Created by configure
+
+'[$]0' $ac_configure_args "\[$]@"
+EOF
+
+ chmod +x "$1"
+ rm -f "$1.old"
+])
+
+
+# SVN_EXTERNAL_PROJECT_SETUP()
+# Internal helper for SVN_EXTERNAL_PROJECT.
+AC_DEFUN([SVN_EXTERNAL_PROJECT_SETUP], [
+ do_subdir_config="yes"
+ AC_ARG_ENABLE([subdir-config],
+ AS_HELP_STRING([--disable-subdir-config],
+ [do not reconfigure packages in subdirectories]),
+ [if test "$enableval" = "no"; then do_subdir_config="no"; fi])
+ AC_SUBST([SVN_EXTERNAL_PROJECT_SUBDIRS], [""])
+])
+
+# SVN_EXTERNAL_PROJECT(SUBDIR [, ADDITIONAL-CONFIGURE-ARGS])
+# Setup SUBDIR as an external project. This means:
+# - Execute the configure script immediately at the point of macro invocation.
+# - Add SUBDIR to the substitution variable SVN_EXTERNAL_PROJECT_SUBDIRS,
+# for the Makefile.in to arrange to execute make in the subdir.
+#
+# Derived from APR_SUBDIR_CONFIG
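+#
+# Example use (serf.m4 in this directory invokes it this way):
+#   SVN_EXTERNAL_PROJECT([serf], [--with-apr=$apr_config --with-apr-util=$apu_config])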
+AC_DEFUN([SVN_EXTERNAL_PROJECT], [
+ AC_REQUIRE([SVN_EXTERNAL_PROJECT_SETUP])
+ SVN_EXTERNAL_PROJECT_SUBDIRS="$SVN_EXTERNAL_PROJECT_SUBDIRS $1"
+ if test "$do_subdir_config" = "yes" ; then
+ # save our work to this point; this allows the sub-package to use it
+ AC_CACHE_SAVE
+
+ AC_MSG_NOTICE([configuring package in $1 now])
+ ac_popdir=`pwd`
+ ac_abs_srcdir=`(cd $srcdir/$1 && pwd)`
+ apr_config_subdirs="$1"
+ test -d $1 || $MKDIR $1
+ cd $1
+
+ # A "../" for each directory in /$config_subdirs.
+ ac_dots=[`echo $apr_config_subdirs| $SED -e 's%^\./%%' -e 's%[^/]$%&/%' -e 's%[^/]*/%../%g'`]
+
+ # Make the cache file name correct relative to the subdirectory.
+ case "$cache_file" in
+ /*) ac_sub_cache_file=$cache_file ;;
+ *) # Relative path.
+ ac_sub_cache_file="$ac_dots$cache_file" ;;
+ esac
+
+ # The eval makes quoting arguments work.
+ if eval $SHELL $ac_abs_srcdir/configure $ac_configure_args --cache-file=$ac_sub_cache_file --srcdir=$ac_abs_srcdir $2
+ then :
+ echo "$1 configured properly"
+ else
+ echo "configure failed for $1"
+ exit 1
+ fi
+ cd $ac_popdir
+
+ # grab any updates from the sub-package
+ AC_CACHE_LOAD
+ else
+ AC_MSG_WARN([not running configure in $1])
+ fi
+])
+
+dnl
+dnl SVN_CONFIG_SCRIPT(path)
+dnl
+dnl Make AC_OUTPUT create an executable file.
+dnl Accumulate filenames in $SVN_CONFIG_SCRIPT_FILES for AC_SUBSTing to
+dnl use in, for example, Makefile distclean rules.
+dnl
+AC_DEFUN(SVN_CONFIG_SCRIPT, [
+ SVN_CONFIG_SCRIPT_FILES="$SVN_CONFIG_SCRIPT_FILES $1"
+ AC_CONFIG_FILES([$1], [chmod +x $1])])
+
+dnl Iteratively interpolate the contents of the second argument
+dnl until interpolation offers no new result. Then assign the
+dnl final result to $1.
+dnl
+dnl Based on APR_EXPAND_VAR macro
+dnl
+dnl Example:
+dnl
+dnl foo=1
+dnl bar='${foo}/2'
+dnl baz='${bar}/3'
+dnl SVN_EXPAND_VAR(fraz, $baz)
+dnl $fraz is now "1/2/3"
+dnl
+AC_DEFUN(SVN_EXPAND_VAR,[
+svn_last=
+svn_cur="$2"
+while test "x${svn_cur}" != "x${svn_last}";
+do
+ svn_last="${svn_cur}"
+ svn_cur=`eval "echo ${svn_cur}"`
+done
+$1="${svn_cur}"
+])
+
+dnl SVN_MAYBE_ADD_TO_CFLAGS(option)
+dnl
+dnl Attempt to compile a trivial C program with the passed-in option added to
+dnl CFLAGS, to test whether the option is valid. If the compilation succeeds,
+dnl keep the option in CFLAGS; otherwise restore the previous CFLAGS.
+dnl
+dnl This macro is usually used for stricter syntax checking flags.
+dnl Therefore we include certain headers which may in turn include system
+dnl headers, as system headers on some platforms may fail strictness checks
+dnl we wish to use on other platforms.
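+dnl
+dnl Example (the flag shown is illustrative; configure.ac chooses the real ones):
+dnl   SVN_MAYBE_ADD_TO_CFLAGS(-Wold-style-definition)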
+
+AC_DEFUN(SVN_MAYBE_ADD_TO_CFLAGS,
+[
+ option="$1"
+ svn_maybe_add_to_cflags_saved_flags="$CFLAGS"
+ CFLAGS="$CFLAGS $option"
+ AC_MSG_CHECKING([if $CC accepts $option])
+ AC_COMPILE_IFELSE([AC_LANG_PROGRAM(
+ [[#include <apr_portable.h>]],
+ [[]])],
+ [svn_maybe_add_to_cflags_ok="yes"],
+ [svn_maybe_add_to_cflags_ok="no"]
+ )
+ if test "$svn_maybe_add_to_cflags_ok" = "yes"; then
+ AC_MSG_RESULT([yes, will use it])
+ else
+ AC_MSG_RESULT([no])
+ CFLAGS="$svn_maybe_add_to_cflags_saved_flags"
+ fi
+])
+
+dnl SVN_REMOVE_STANDARD_LIB_DIRS(OPTIONS)
+dnl
+dnl Remove standard library search directories.
+dnl OPTIONS is a list of compiler/linker options.
+dnl This macro prints input options except -L options whose arguments are
+dnl standard library search directories (e.g. /usr/lib).
+dnl
+dnl This macro is used to avoid linking against Subversion libraries
+dnl potentially placed in standard library search directories.
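+dnl
+dnl For example (illustrative input), given "-L/usr/lib -L/opt/serf/lib -lserf-1"
+dnl this macro prints "-L/opt/serf/lib -lserf-1".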
+AC_DEFUN([SVN_REMOVE_STANDARD_LIB_DIRS],
+[
+ input_flags="$1"
+ output_flags=""
+ filtered_dirs="/lib /lib64 /usr/lib /usr/lib64"
+ for flag in $input_flags; do
+ filter="no"
+ for dir in $filtered_dirs; do
+ if test "$flag" = "-L$dir" || test "$flag" = "-L$dir/"; then
+ filter="yes"
+ break
+ fi
+ done
+ if test "$filter" = "no"; then
+ output_flags="$output_flags $flag"
+ fi
+ done
+ if test -n "$output_flags"; then
+ printf "%s" "${output_flags# }"
+ fi
+])
diff --git a/build/ac-macros/swig.m4 b/build/ac-macros/swig.m4
new file mode 100644
index 0000000..a0e91ee
--- /dev/null
+++ b/build/ac-macros/swig.m4
@@ -0,0 +1,297 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl check to see if SWIG is current enough.
+dnl
+dnl if it is, then check to see if we have the correct version of python.
+dnl
+dnl if we do, then set up the appropriate SWIG_ variables to build the
+dnl python bindings.
+
+AC_DEFUN(SVN_CHECK_SWIG,
+[
+ AC_ARG_WITH(swig,
+ AS_HELP_STRING([--with-swig=PATH],
+ [Try to use 'PATH/bin/swig' to build the
+ swig bindings. If PATH is not specified,
+ look for a 'swig' binary in your PATH.]),
+ [
+ case "$withval" in
+ "no")
+ SWIG_SUITABLE=no
+ SVN_FIND_SWIG(no)
+ ;;
+ "yes")
+ SVN_FIND_SWIG(check)
+ ;;
+ *)
+ SVN_FIND_SWIG($withval)
+ ;;
+ esac
+ ],
+ [
+ SVN_FIND_SWIG(check)
+ ])
+])
+
+AC_DEFUN(SVN_FIND_SWIG,
+[
+ where=$1
+
+ if test $where = no; then
+ SWIG=none
+ elif test $where = check; then
+ AC_PATH_PROG(SWIG, swig, none)
+ else
+ if test -f "$where"; then
+ SWIG="$where"
+ else
+ SWIG="$where/bin/swig"
+ fi
+ if test ! -f "$SWIG" || test ! -x "$SWIG"; then
+ AC_MSG_ERROR([Could not find swig binary at $SWIG])
+ fi
+ fi
+
+ if test "$SWIG" != "none"; then
+ AC_MSG_CHECKING([swig version])
+ SWIG_VERSION_RAW="`$SWIG -version 2>&1 | \
+ $SED -ne 's/^.*Version \(.*\)$/\1/p'`"
+ # We want the version as an integer so we can test against
+ # which version we're using. SWIG doesn't provide this
+ # to us so we have to come up with it on our own.
+ # The major is passed straight through,
+ # the minor is zero padded to two places,
+ # and the patch level is zero padded to three places.
+ # e.g. 1.3.24 becomes 103024
+ SWIG_VERSION="`echo \"$SWIG_VERSION_RAW\" | \
+ $SED -e 's/[[^0-9\.]].*$//' \
+ -e 's/\.\([[0-9]]\)$/.0\1/' \
+ -e 's/\.\([[0-9]][[0-9]]\)$/.0\1/' \
+ -e 's/\.\([[0-9]]\)\./0\1/; s/\.//g;'`"
+ AC_MSG_RESULT([$SWIG_VERSION_RAW])
+ # If you change the required swig version number, don't forget to update:
+ # subversion/bindings/swig/INSTALL
+ # packages/rpm/redhat-8+/subversion.spec
+ # packages/rpm/redhat-7.x/subversion.spec
+ # packages/rpm/rhel-3/subversion.spec
+ # packages/rpm/rhel-4/subversion.spec
+ if test -n "$SWIG_VERSION" && test "$SWIG_VERSION" -ge "103024"; then
+ SWIG_SUITABLE=yes
+ else
+ SWIG_SUITABLE=no
+ AC_MSG_WARN([Detected SWIG version $SWIG_VERSION_RAW])
+ AC_MSG_WARN([Subversion requires SWIG 1.3.24 or later])
+ fi
+ fi
+
+ SWIG_PY_COMPILE="none"
+ SWIG_PY_LINK="none"
+ if test "$PYTHON" != "none"; then
+ AC_MSG_NOTICE([Configuring python swig binding])
+
+ AC_CACHE_CHECK([for Python includes], [ac_cv_python_includes],[
+ ac_cv_python_includes="`$PYTHON ${abs_srcdir}/build/get-py-info.py --includes`"
+ ])
+ SWIG_PY_INCLUDES="\$(SWIG_INCLUDES) $ac_cv_python_includes"
+
+ if test "$ac_cv_python_includes" = "none"; then
+ AC_MSG_WARN([python bindings cannot be built without distutils module])
+ fi
+
+ AC_CACHE_CHECK([for compiling Python extensions], [ac_cv_python_compile],[
+ ac_cv_python_compile="`$PYTHON ${abs_srcdir}/build/get-py-info.py --compile`"
+ ])
+ SWIG_PY_COMPILE="$ac_cv_python_compile $CFLAGS"
+
+ AC_CACHE_CHECK([for linking Python extensions], [ac_cv_python_link],[
+ ac_cv_python_link="`$PYTHON ${abs_srcdir}/build/get-py-info.py --link`"
+ ])
+ SWIG_PY_LINK="$ac_cv_python_link"
+
+ AC_CACHE_CHECK([for linking Python libraries], [ac_cv_python_libs],[
+ ac_cv_python_libs="`$PYTHON ${abs_srcdir}/build/get-py-info.py --libs`"
+ ])
+ SWIG_PY_LIBS="`SVN_REMOVE_STANDARD_LIB_DIRS($ac_cv_python_libs)`"
+
+ dnl Sun Forte adds an extra space before substituting APR_INT64_T_FMT
+ dnl gcc-2.95 adds an extra space after substituting APR_INT64_T_FMT
+ dnl thus the egrep patterns have a + in them.
+ SVN_PYCFMT_SAVE_CPPFLAGS="$CPPFLAGS"
+ CPPFLAGS="$CPPFLAGS $SVN_APR_INCLUDES"
+ AC_CACHE_CHECK([for apr_int64_t Python/C API format string],
+ [svn_cv_pycfmt_apr_int64_t], [
+ if test "x$svn_cv_pycfmt_apr_int64_t" = "x"; then
+ AC_EGREP_CPP([MaTcHtHiS +\"lld\" +EnDeNd],
+ [#include <apr.h>
+ MaTcHtHiS APR_INT64_T_FMT EnDeNd],
+ [svn_cv_pycfmt_apr_int64_t="L"])
+ fi
+ if test "x$svn_cv_pycfmt_apr_int64_t" = "x"; then
+      AC_EGREP_CPP([MaTcHtHiS +\"ld\" +EnDeNd],
+ [#include <apr.h>
+ MaTcHtHiS APR_INT64_T_FMT EnDeNd],
+ [svn_cv_pycfmt_apr_int64_t="l"])
+ fi
+ if test "x$svn_cv_pycfmt_apr_int64_t" = "x"; then
+ AC_EGREP_CPP([MaTcHtHiS +\"d\" +EnDeNd],
+ [#include <apr.h>
+ MaTcHtHiS APR_INT64_T_FMT EnDeNd],
+ [svn_cv_pycfmt_apr_int64_t="i"])
+ fi
+ ])
+ CPPFLAGS="$SVN_PYCFMT_SAVE_CPPFLAGS"
+ if test "x$svn_cv_pycfmt_apr_int64_t" = "x"; then
+ AC_MSG_ERROR([failed to recognize APR_INT64_T_FMT on this platform])
+ fi
+ AC_DEFINE_UNQUOTED([SVN_APR_INT64_T_PYCFMT],
+ ["$svn_cv_pycfmt_apr_int64_t"],
+ [Define to the Python/C API format character suitable]
+ [ for apr_int64_t])
+ fi
+
+ if test "$PERL" != "none"; then
+ AC_MSG_CHECKING([perl version])
+ dnl Note that the q() bit is there to avoid unbalanced brackets
+ dnl which m4 really doesn't like.
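+    dnl For example, perl 5.8.8 reports its numeric version as 5.008008,
+    dnl so PERL_VERSION becomes 5008008 (illustrative value).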
+ PERL_VERSION="`$PERL -e 'q([[); print $]] * 1000000,$/;'`"
+ AC_MSG_RESULT([$PERL_VERSION])
+ if test "$PERL_VERSION" -ge "5008000"; then
+ SWIG_PL_INCLUDES="\$(SWIG_INCLUDES) `$PERL -MExtUtils::Embed -e ccopts`"
+ else
+ AC_MSG_WARN([perl bindings require perl 5.8.0 or newer.])
+ fi
+ fi
+
+ SWIG_RB_COMPILE="none"
+ SWIG_RB_LINK="none"
+ if test "$RUBY" != "none"; then
+ rbconfig="$RUBY -rrbconfig -e "
+
+ for var_name in arch archdir CC LDSHARED DLEXT LIBS LIBRUBYARG \
+ rubyhdrdir sitedir sitelibdir sitearchdir libdir
+ do
+ rbconfig_tmp=`$rbconfig "print Config::CONFIG@<:@'$var_name'@:>@"`
+ eval "rbconfig_$var_name=\"$rbconfig_tmp\""
+ done
+
+ AC_MSG_NOTICE([Configuring Ruby SWIG binding])
+
+ AC_CACHE_CHECK([for Ruby include path], [svn_cv_ruby_includes],[
+ if test -d "$rbconfig_rubyhdrdir"; then
+ dnl Ruby >=1.9
+ svn_cv_ruby_includes="-I. -I$rbconfig_rubyhdrdir -I$rbconfig_rubyhdrdir/ruby -I$rbconfig_rubyhdrdir/ruby/backward -I$rbconfig_rubyhdrdir/$rbconfig_arch"
+ else
+ dnl Ruby 1.8
+ svn_cv_ruby_includes="-I. -I$rbconfig_archdir"
+ fi
+ ])
+ SWIG_RB_INCLUDES="\$(SWIG_INCLUDES) $svn_cv_ruby_includes"
+
+ AC_CACHE_CHECK([how to compile Ruby extensions], [svn_cv_ruby_compile],[
+ # Ruby doesn't like '-ansi', so strip that out of CFLAGS
+ svn_cv_ruby_compile="$rbconfig_CC `echo $CFLAGS | $SED -e "s/ -ansi//g;s/ -std=c89//g"`"
+ ])
+ SWIG_RB_COMPILE="$svn_cv_ruby_compile"
+
+ AC_CACHE_CHECK([how to link Ruby extensions], [svn_cv_ruby_link],[
+ svn_cv_ruby_link="`$RUBY -e 'ARGV.shift; print ARGV.join(%q( ))' \
+ $rbconfig_LDSHARED`"
+ svn_cv_ruby_link="$rbconfig_CC $svn_cv_ruby_link"
+ svn_cv_ruby_link="$svn_cv_ruby_link -shrext .$rbconfig_DLEXT"
+ ])
+ SWIG_RB_LINK="$svn_cv_ruby_link"
+
+ AC_CACHE_CHECK([how to link Ruby libraries], [ac_cv_ruby_libs], [
+ ac_cv_ruby_libs="$rbconfig_LIBRUBYARG $rbconfig_LIBS"
+ ])
+ SWIG_RB_LIBS="`SVN_REMOVE_STANDARD_LIB_DIRS($ac_cv_ruby_libs)`"
+
+ AC_MSG_CHECKING([for rb_errinfo])
+ old_CFLAGS="$CFLAGS"
+ old_LIBS="$LIBS"
+ CFLAGS="`echo $CFLAGS | $SED -e "s/ -ansi//g;s/ -std=c89//g"` $svn_cv_ruby_includes"
+ LIBS="$SWIG_RB_LIBS"
+ AC_LINK_IFELSE([AC_LANG_SOURCE([[
+#include <ruby.h>
+int main()
+{rb_errinfo();}]])], have_rb_errinfo="yes", have_rb_errinfo="no")
+ if test "$have_rb_errinfo" = "yes"; then
+ AC_MSG_RESULT([yes])
+ AC_DEFINE([HAVE_RB_ERRINFO], [1],
+ [Define to 1 if you have the `rb_errinfo' function.])
+ else
+ AC_MSG_RESULT([no])
+ fi
+ CFLAGS="$old_CFLAGS"
+ LIBS="$old_LIBS"
+
+ AC_CACHE_VAL([svn_cv_ruby_sitedir],[
+ svn_cv_ruby_sitedir="$rbconfig_sitedir"
+ ])
+ AC_ARG_WITH([ruby-sitedir],
+ AS_HELP_STRING([--with-ruby-sitedir=SITEDIR],
+ [install Ruby bindings in SITEDIR
+ (default is same as ruby's one)]),
+ [svn_ruby_installdir="$withval"],
+ [svn_ruby_installdir="$svn_cv_ruby_sitedir"])
+
+ AC_MSG_CHECKING([where to install Ruby scripts])
+ AC_CACHE_VAL([svn_cv_ruby_sitedir_libsuffix],[
+ svn_cv_ruby_sitedir_libsuffix="`echo "$rbconfig_sitelibdir" | \
+ $SED -e "s,^$rbconfig_sitedir,,"`"
+ ])
+ SWIG_RB_SITE_LIB_DIR="${svn_ruby_installdir}${svn_cv_ruby_sitedir_libsuffix}"
+ AC_MSG_RESULT([$SWIG_RB_SITE_LIB_DIR])
+
+ AC_MSG_CHECKING([where to install Ruby extensions])
+ AC_CACHE_VAL([svn_cv_ruby_sitedir_archsuffix],[
+ svn_cv_ruby_sitedir_archsuffix="`echo "$rbconfig_sitearchdir" | \
+ $SED -e "s,^$rbconfig_sitedir,,"`"
+ ])
+ SWIG_RB_SITE_ARCH_DIR="${svn_ruby_installdir}${svn_cv_ruby_sitedir_archsuffix}"
+ AC_MSG_RESULT([$SWIG_RB_SITE_ARCH_DIR])
+
+ AC_MSG_CHECKING([how to use output level for Ruby bindings tests])
+ AC_CACHE_VAL([svn_cv_ruby_test_verbose],[
+ svn_cv_ruby_test_verbose="normal"
+ ])
+ AC_ARG_WITH([ruby-test-verbose],
+ AS_HELP_STRING([--with-ruby-test-verbose=LEVEL],
+ [how to use output level for Ruby bindings tests
+ (default is normal)]),
+ [svn_ruby_test_verbose="$withval"],
+ [svn_ruby_test_verbose="$svn_cv_ruby_test_verbose"])
+ SWIG_RB_TEST_VERBOSE="$svn_ruby_test_verbose"
+ AC_MSG_RESULT([$SWIG_RB_TEST_VERBOSE])
+ fi
+ AC_SUBST(SWIG)
+ AC_SUBST(SWIG_PY_INCLUDES)
+ AC_SUBST(SWIG_PY_COMPILE)
+ AC_SUBST(SWIG_PY_LINK)
+ AC_SUBST(SWIG_PY_LIBS)
+ AC_SUBST(SWIG_PL_INCLUDES)
+ AC_SUBST(SWIG_RB_LINK)
+ AC_SUBST(SWIG_RB_LIBS)
+ AC_SUBST(SWIG_RB_INCLUDES)
+ AC_SUBST(SWIG_RB_COMPILE)
+ AC_SUBST(SWIG_RB_SITE_LIB_DIR)
+ AC_SUBST(SWIG_RB_SITE_ARCH_DIR)
+ AC_SUBST(SWIG_RB_TEST_VERBOSE)
+])
diff --git a/build/ac-macros/zlib.m4 b/build/ac-macros/zlib.m4
new file mode 100644
index 0000000..298dfee
--- /dev/null
+++ b/build/ac-macros/zlib.m4
@@ -0,0 +1,74 @@
+dnl ===================================================================
+dnl Licensed to the Apache Software Foundation (ASF) under one
+dnl or more contributor license agreements. See the NOTICE file
+dnl distributed with this work for additional information
+dnl regarding copyright ownership. The ASF licenses this file
+dnl to you under the Apache License, Version 2.0 (the
+dnl "License"); you may not use this file except in compliance
+dnl with the License. You may obtain a copy of the License at
+dnl
+dnl http://www.apache.org/licenses/LICENSE-2.0
+dnl
+dnl Unless required by applicable law or agreed to in writing,
+dnl software distributed under the License is distributed on an
+dnl "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+dnl KIND, either express or implied. See the License for the
+dnl specific language governing permissions and limitations
+dnl under the License.
+dnl ===================================================================
+dnl
+dnl SVN_LIB_Z
+dnl
+dnl Check configure options and assign variables related to
+dnl the zlib library.
+dnl
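+dnl Typical usage (the prefix shown is an example only):
+dnl   ./configure --with-zlib=/opt/zlib
+dnl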
+
+AC_DEFUN(SVN_LIB_Z,
+[
+ zlib_found=no
+
+ AC_ARG_WITH(zlib,AS_HELP_STRING([--with-zlib=PREFIX],
+ [zlib compression library]),
+ [
+ if test "$withval" = "yes" ; then
+ AC_CHECK_HEADER(zlib.h, [
+ AC_CHECK_LIB(z, inflate, [zlib_found="builtin"])
+ ])
+ elif test "$withval" = "no" ; then
+ AC_MSG_ERROR([cannot compile without zlib.])
+ else
+ AC_MSG_NOTICE([zlib library configuration])
+ zlib_prefix=$withval
+ save_cppflags="$CPPFLAGS"
+ CPPFLAGS="$CPPFLAGS -I$zlib_prefix/include"
+ AC_CHECK_HEADERS(zlib.h,[
+ save_ldflags="$LDFLAGS"
+ LDFLAGS="$LDFLAGS -L$zlib_prefix/lib"
+ AC_CHECK_LIB(z, inflate, [zlib_found="yes"])
+ LDFLAGS="$save_ldflags"
+ ])
+ CPPFLAGS="$save_cppflags"
+ fi
+ ],
+ [
+ AC_CHECK_HEADER(zlib.h, [
+ AC_CHECK_LIB(z, inflate, [zlib_found="builtin"])
+ ])
+ ])
+
+ if test "$zlib_found" = "no"; then
+ AC_MSG_ERROR([subversion requires zlib])
+ fi
+
+ if test "$zlib_found" = "yes"; then
+ SVN_ZLIB_PREFIX="$zlib_prefix"
+ SVN_ZLIB_INCLUDES="-I$zlib_prefix/include"
+ LDFLAGS="$LDFLAGS `SVN_REMOVE_STANDARD_LIB_DIRS(-L$zlib_prefix/lib)`"
+ fi
+
+ SVN_ZLIB_LIBS="-lz"
+
+ AC_SUBST(SVN_ZLIB_PREFIX)
+ AC_SUBST(SVN_ZLIB_INCLUDES)
+ AC_SUBST(SVN_ZLIB_LIBS)
+])
diff --git a/build/buildcheck.sh b/build/buildcheck.sh
new file mode 100755
index 0000000..6053e58
--- /dev/null
+++ b/build/buildcheck.sh
@@ -0,0 +1,161 @@
+#! /bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# buildcheck.sh: Inspects the build setup to make detection and
+# correction of problems an easier process.
+
+# Initialize parameters
+VERSION_CHECK="$1"
+
+if test "$VERSION_CHECK" != "--release"; then
+ echo "buildcheck: checking installation..."
+else
+ echo "buildcheck: checking installation for a source release..."
+fi
+
+#--------------------------------------------------------------------------
+# autoconf 2.59 or newer
+#
+ac_version=`${AUTOCONF:-autoconf} --version 2>/dev/null|sed -e 's/^[^0-9]*//' -e 's/[a-z]* *$//' -e 1q`
+if test -z "$ac_version"; then
+ echo "buildcheck: autoconf not found."
+ echo " You need autoconf version 2.59 or newer installed."
+ exit 1
+fi
+IFS=.; set $ac_version; IFS=' '
+if test "$1" = "2" -a "$2" -lt "59" || test "$1" -lt "2"; then
+ echo "buildcheck: autoconf version $ac_version found."
+ echo " You need autoconf version 2.59 or newer installed."
+ echo " If you have a sufficient autoconf installed, but it"
+ echo " is not named 'autoconf', then try setting the"
+ echo " AUTOCONF environment variable. (See the INSTALL file"
+ echo " for details.)"
+ exit 1
+fi
+
+echo "buildcheck: autoconf version $ac_version (ok)"
+
+#--------------------------------------------------------------------------
+# autoheader 2.59 or newer
+#
+ah_version=`${AUTOHEADER:-autoheader} --version 2>/dev/null|sed -e 's/^[^0-9]*//' -e 's/[a-z]* *$//' -e 1q`
+if test -z "$ah_version"; then
+ echo "buildcheck: autoheader not found."
+ echo " You need autoheader version 2.59 or newer installed."
+ exit 1
+fi
+IFS=.; set $ah_version; IFS=' '
+if test "$1" = "2" -a "$2" -lt "59" || test "$1" -lt "2"; then
+ echo "buildcheck: autoheader version $ah_version found."
+ echo " You need autoheader version 2.59 or newer installed."
+ echo " If you have a sufficient autoheader installed, but it"
+ echo " is not named 'autoheader', then try setting the"
+ echo " AUTOHEADER environment variable. (See the INSTALL file"
+ echo " for details.)"
+ exit 1
+fi
+
+echo "buildcheck: autoheader version $ah_version (ok)"
+
+#--------------------------------------------------------------------------
+# libtool 1.4 or newer
+#
+LIBTOOL_WANTED_MAJOR=1
+LIBTOOL_WANTED_MINOR=4
+LIBTOOL_WANTED_PATCH=
+LIBTOOL_WANTED_VERSION=1.4
+
+# The minimum version for source releases is 1.4.3,
+# because it's required by (at least) Solaris.
+if test "$VERSION_CHECK" = "--release"; then
+ LIBTOOL_WANTED_PATCH=3
+ LIBTOOL_WANTED_VERSION=1.4.3
+else
+ case `uname -sr` in
+ SunOS\ 5.*)
+ LIBTOOL_WANTED_PATCH=3
+ LIBTOOL_WANTED_VERSION=1.4.3
+ ;;
+ esac
+fi
+
+libtool=${LIBTOOL:-`./build/PrintPath glibtool libtool libtool15`}
+# Extract the libtool version number: everything from the first number in
+# the version text until a hyphen or space.
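+# For example, output like "ltmain.sh (GNU libtool) 1.5.22 (1.1220.2.365)"
+# would yield "1.5.22" (illustrative; the exact text varies by libtool version).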
+lt_pversion=`$libtool --version 2>/dev/null |
+ sed -e 's/^[^0-9]*//' -e 's/[- ].*//' -e '/^$/d' |
+ sed -e 1q`
+if test -z "$lt_pversion"; then
+ echo "buildcheck: libtool not found."
+ echo " You need libtool version $LIBTOOL_WANTED_VERSION or newer installed"
+ exit 1
+fi
+lt_version=`echo $lt_pversion|sed -e 's/\([a-z]*\)$/.\1/'`
+IFS=.; set $lt_version; IFS=' '
+lt_status="good"
+if test "$1" = "$LIBTOOL_WANTED_MAJOR"; then
+ if test "$2" -gt "$LIBTOOL_WANTED_MINOR"; then
+ lt_status="good"
+ elif test "$2" -lt "$LIBTOOL_WANTED_MINOR"; then
+ lt_status="bad"
+ elif test ! -z "$LIBTOOL_WANTED_PATCH"; then
+ if test "$3" -lt "$LIBTOOL_WANTED_PATCH"; then
+ lt_status="bad"
+ fi
+ fi
+fi
+if test $lt_status != "good"; then
+ echo "buildcheck: libtool version $lt_pversion found."
+ echo " You need libtool version $LIBTOOL_WANTED_VERSION or newer installed"
+ exit 1
+fi
+
+echo "buildcheck: libtool version $lt_pversion (ok)"
+
+#--------------------------------------------------------------------------
+# check that our local copies of files match up with those in APR(UTIL)
+#
+if test -d ./apr; then
+ if cmp -s ./build/ac-macros/find_apr.m4 ./apr/build/find_apr.m4; then
+ :
+ else
+ echo "buildcheck: local copy of find_apr.m4 does not match APR's copy."
+ echo " An updated copy of find_apr.m4 may need to be checked in."
+ fi
+ if cmp -s ./build/PrintPath ./apr/build/PrintPath; then
+ :
+ else
+ echo "buildcheck: local copy of PrintPath does not match APR's copy."
+ echo " An updated copy of PrintPath may need to be checked in."
+ fi
+fi
+
+if test -d ./apr-util; then
+ if cmp -s ./build/ac-macros/find_apu.m4 ./apr-util/build/find_apu.m4; then
+ :
+ else
+ echo "buildcheck: local copy of find_apu.m4 does not match APRUTIL's copy."
+ echo " An updated copy of find_apu.m4 may need to be checked in."
+ fi
+fi
+
+#--------------------------------------------------------------------------
+exit 0
diff --git a/build/config.guess b/build/config.guess
new file mode 100755
index 0000000..2852378
--- /dev/null
+++ b/build/config.guess
@@ -0,0 +1,1505 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+# Free Software Foundation, Inc.
+
+timestamp='2010-08-21'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+
+# Originally written by Per Bothner. Please send patches (context
+# diff format) to <config-patches@gnu.org> and include a ChangeLog
+# entry.
+#
+# This script attempts to guess a canonical system name similar to
+# config.sub. If it succeeds, it prints the system name on stdout, and
+# exits with 0. Otherwise, it exits with 1.
+#
+# You can get the latest version of this script from:
+# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system \`$me' is run on.
+
+Operation modes:
+ -h, --help print this help, then exit
+ -t, --time-stamp print date of last modification, then exit
+ -v, --version print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free
+Software Foundation, Inc.
+
+This is free software; see the source for copying conditions. There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+ case $1 in
+ --time-stamp | --time* | -t )
+ echo "$timestamp" ; exit ;;
+ --version | -v )
+ echo "$version" ; exit ;;
+ --help | --h* | -h )
+ echo "$usage"; exit ;;
+ -- ) # Stop option processing
+ shift; break ;;
+ - ) # Use stdin as input.
+ break ;;
+ -* )
+ echo "$me: invalid option $1$help" >&2
+ exit 1 ;;
+ * )
+ break ;;
+ esac
+done
+
+if test $# != 0; then
+ echo "$me: too many arguments$help" >&2
+ exit 1
+fi
+
+trap 'exit 1' HUP INT TERM
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
+# use `HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+set_cc_for_build='
+trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
+trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" HUP INT PIPE TERM ;
+: ${TMPDIR=/tmp} ;
+ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
+ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
+dummy=$tmp/dummy ;
+tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
+case $CC_FOR_BUILD,$HOST_CC,$CC in
+ ,,) echo "int x;" > $dummy.c ;
+ for c in cc gcc c89 c99 ; do
+ if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
+ CC_FOR_BUILD="$c"; break ;
+ fi ;
+ done ;
+ if test x"$CC_FOR_BUILD" = x ; then
+ CC_FOR_BUILD=no_compiler_found ;
+ fi
+ ;;
+ ,,*) CC_FOR_BUILD=$CC ;;
+ ,*,*) CC_FOR_BUILD=$HOST_CC ;;
+esac ; set_cc_for_build= ;'
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi@noc.rutgers.edu 1994-08-24)
+if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
+ PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+# Note: order is significant - the case branches are not exclusive.
+
+case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
+ *:NetBSD:*:*)
+ # NetBSD (nbsd) targets should (where applicable) match one or
+ # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*,
+ # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently
+ # switched to ELF, *-*-netbsd* would select the old
+ # object file format. This provides both forward
+ # compatibility and a consistent mechanism for selecting the
+ # object file format.
+ #
+ # Note: NetBSD doesn't particularly care about the vendor
+ # portion of the name. We always set it to "unknown".
+ sysctl="sysctl -n hw.machine_arch"
+ UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
+ /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
+ case "${UNAME_MACHINE_ARCH}" in
+ armeb) machine=armeb-unknown ;;
+ arm*) machine=arm-unknown ;;
+ sh3el) machine=shl-unknown ;;
+ sh3eb) machine=sh-unknown ;;
+ sh5el) machine=sh5le-unknown ;;
+ *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
+ esac
+ # The Operating System including object format, if it has switched
+ # to ELF recently, or will in the future.
+ case "${UNAME_MACHINE_ARCH}" in
+ arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+ eval $set_cc_for_build
+ if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ELF__
+ then
+ # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
+ # Return netbsd for either. FIX?
+ os=netbsd
+ else
+ os=netbsdelf
+ fi
+ ;;
+ *)
+ os=netbsd
+ ;;
+ esac
+ # The OS release
+ # Debian GNU/NetBSD machines have a different userland, and
+ # thus, need a distinct triplet. However, they do not need
+ # kernel version information, so it can be replaced with a
+ # suitable tag, in the style of linux-gnu.
+ case "${UNAME_VERSION}" in
+ Debian*)
+ release='-gnu'
+ ;;
+ *)
+ release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
+ ;;
+ esac
+ # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+ # contains redundant information, the shorter form:
+ # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+ echo "${machine}-${os}${release}"
+ exit ;;
+ *:OpenBSD:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+ echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE}
+ exit ;;
+ *:ekkoBSD:*:*)
+ echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
+ exit ;;
+ *:SolidBSD:*:*)
+ echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE}
+ exit ;;
+ macppc:MirBSD:*:*)
+ echo powerpc-unknown-mirbsd${UNAME_RELEASE}
+ exit ;;
+ *:MirBSD:*:*)
+ echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
+ exit ;;
+ alpha:OSF1:*:*)
+ case $UNAME_RELEASE in
+ *4.0)
+ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+ ;;
+ *5.*)
+ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+ ;;
+ esac
+ # According to Compaq, /usr/sbin/psrinfo has been available on
+ # OSF/1 and Tru64 systems produced since 1995. I hope that
+ # covers most systems running today. This code pipes the CPU
+ # types through head -n 1, so we only detect the type of CPU 0.
+ ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+ case "$ALPHA_CPU_TYPE" in
+ "EV4 (21064)")
+ UNAME_MACHINE="alpha" ;;
+ "EV4.5 (21064)")
+ UNAME_MACHINE="alpha" ;;
+ "LCA4 (21066/21068)")
+ UNAME_MACHINE="alpha" ;;
+ "EV5 (21164)")
+ UNAME_MACHINE="alphaev5" ;;
+ "EV5.6 (21164A)")
+ UNAME_MACHINE="alphaev56" ;;
+ "EV5.6 (21164PC)")
+ UNAME_MACHINE="alphapca56" ;;
+ "EV5.7 (21164PC)")
+ UNAME_MACHINE="alphapca57" ;;
+ "EV6 (21264)")
+ UNAME_MACHINE="alphaev6" ;;
+ "EV6.7 (21264A)")
+ UNAME_MACHINE="alphaev67" ;;
+ "EV6.8CB (21264C)")
+ UNAME_MACHINE="alphaev68" ;;
+ "EV6.8AL (21264B)")
+ UNAME_MACHINE="alphaev68" ;;
+ "EV6.8CX (21264D)")
+ UNAME_MACHINE="alphaev68" ;;
+ "EV6.9A (21264/EV69A)")
+ UNAME_MACHINE="alphaev69" ;;
+ "EV7 (21364)")
+ UNAME_MACHINE="alphaev7" ;;
+ "EV7.9 (21364A)")
+ UNAME_MACHINE="alphaev79" ;;
+ esac
+ # A Pn.n version is a patched version.
+ # A Vn.n version is a released version.
+ # A Tn.n version is a released field test version.
+ # A Xn.n version is an unreleased experimental baselevel.
+ # 1.2 uses "1.2" for uname -r.
+ echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+ exit ;;
+ Alpha\ *:Windows_NT*:*)
+ # How do we know it's Interix rather than the generic POSIX subsystem?
+ # Should we change UNAME_MACHINE based on the output of uname instead
+ # of the specific Alpha model?
+ echo alpha-pc-interix
+ exit ;;
+ 21064:Windows_NT:50:3)
+ echo alpha-dec-winnt3.5
+ exit ;;
+ Amiga*:UNIX_System_V:4.0:*)
+ echo m68k-unknown-sysv4
+ exit ;;
+ *:[Aa]miga[Oo][Ss]:*:*)
+ echo ${UNAME_MACHINE}-unknown-amigaos
+ exit ;;
+ *:[Mm]orph[Oo][Ss]:*:*)
+ echo ${UNAME_MACHINE}-unknown-morphos
+ exit ;;
+ *:OS/390:*:*)
+ echo i370-ibm-openedition
+ exit ;;
+ *:z/VM:*:*)
+ echo s390-ibm-zvmoe
+ exit ;;
+ *:OS400:*:*)
+ echo powerpc-ibm-os400
+ exit ;;
+ arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+ echo arm-acorn-riscix${UNAME_RELEASE}
+ exit ;;
+ arm:riscos:*:*|arm:RISCOS:*:*)
+ echo arm-unknown-riscos
+ exit ;;
+ SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+ echo hppa1.1-hitachi-hiuxmpp
+ exit ;;
+ Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+ # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+ if test "`(/bin/universe) 2>/dev/null`" = att ; then
+ echo pyramid-pyramid-sysv3
+ else
+ echo pyramid-pyramid-bsd
+ fi
+ exit ;;
+ NILE*:*:*:dcosx)
+ echo pyramid-pyramid-svr4
+ exit ;;
+ DRS?6000:unix:4.0:6*)
+ echo sparc-icl-nx6
+ exit ;;
+ DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+ case `/usr/bin/uname -p` in
+ sparc) echo sparc-icl-nx7; exit ;;
+ esac ;;
+ s390x:SunOS:*:*)
+ echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+ exit ;;
+ sun4H:SunOS:5.*:*)
+ echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+ exit ;;
+ sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+ echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+ exit ;;
+ i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
+ echo i386-pc-auroraux${UNAME_RELEASE}
+ exit ;;
+ i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
+ eval $set_cc_for_build
+ SUN_ARCH="i386"
+ # If there is a compiler, see if it is configured for 64-bit objects.
+ # Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
+ # This test works for both compilers.
+ if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+ if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
+ (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+ grep IS_64BIT_ARCH >/dev/null
+ then
+ SUN_ARCH="x86_64"
+ fi
+ fi
+ echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+ exit ;;
+ sun4*:SunOS:6*:*)
+ # According to config.sub, this is the proper way to canonicalize
+ # SunOS6. Hard to guess exactly what SunOS6 will be like, but
+ # it's likely to be more like Solaris than SunOS4.
+ echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+ exit ;;
+ sun4*:SunOS:*:*)
+ case "`/usr/bin/arch -k`" in
+ Series*|S4*)
+ UNAME_RELEASE=`uname -v`
+ ;;
+ esac
+ # Japanese Language versions have a version number like `4.1.3-JL'.
+ echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
+ exit ;;
+ sun3*:SunOS:*:*)
+ echo m68k-sun-sunos${UNAME_RELEASE}
+ exit ;;
+ sun*:*:4.2BSD:*)
+ UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+ test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
+ case "`/bin/arch`" in
+ sun3)
+ echo m68k-sun-sunos${UNAME_RELEASE}
+ ;;
+ sun4)
+ echo sparc-sun-sunos${UNAME_RELEASE}
+ ;;
+ esac
+ exit ;;
+ aushp:SunOS:*:*)
+ echo sparc-auspex-sunos${UNAME_RELEASE}
+ exit ;;
+ # The situation for MiNT is a little confusing. The machine name
+ # can be virtually everything (everything which is not
+ # "atarist" or "atariste" at least should have a processor
+ # > m68000). The system name ranges from "MiNT" over "FreeMiNT"
+ # to the lowercase version "mint" (or "freemint"). Finally
+ # the system name "TOS" denotes a system which is actually not
+ # MiNT. But MiNT is downward compatible to TOS, so this should
+ # be no problem.
+ atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+ echo m68k-atari-mint${UNAME_RELEASE}
+ exit ;;
+ atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+ echo m68k-atari-mint${UNAME_RELEASE}
+ exit ;;
+ *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+ echo m68k-atari-mint${UNAME_RELEASE}
+ exit ;;
+ milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+ echo m68k-milan-mint${UNAME_RELEASE}
+ exit ;;
+ hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+ echo m68k-hades-mint${UNAME_RELEASE}
+ exit ;;
+ *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+ echo m68k-unknown-mint${UNAME_RELEASE}
+ exit ;;
+ m68k:machten:*:*)
+ echo m68k-apple-machten${UNAME_RELEASE}
+ exit ;;
+ powerpc:machten:*:*)
+ echo powerpc-apple-machten${UNAME_RELEASE}
+ exit ;;
+ RISC*:Mach:*:*)
+ echo mips-dec-mach_bsd4.3
+ exit ;;
+ RISC*:ULTRIX:*:*)
+ echo mips-dec-ultrix${UNAME_RELEASE}
+ exit ;;
+ VAX*:ULTRIX*:*:*)
+ echo vax-dec-ultrix${UNAME_RELEASE}
+ exit ;;
+ 2020:CLIX:*:* | 2430:CLIX:*:*)
+ echo clipper-intergraph-clix${UNAME_RELEASE}
+ exit ;;
+ mips:*:*:UMIPS | mips:*:*:RISCos)
+ eval $set_cc_for_build
+ sed 's/^ //' << EOF >$dummy.c
+#ifdef __cplusplus
+#include <stdio.h> /* for printf() prototype */
+ int main (int argc, char *argv[]) {
+#else
+ int main (argc, argv) int argc; char *argv[]; {
+#endif
+ #if defined (host_mips) && defined (MIPSEB)
+ #if defined (SYSTYPE_SYSV)
+ printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
+ #endif
+ #if defined (SYSTYPE_SVR4)
+ printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
+ #endif
+ #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+ printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
+ #endif
+ #endif
+ exit (-1);
+ }
+EOF
+ $CC_FOR_BUILD -o $dummy $dummy.c &&
+ dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` &&
+ SYSTEM_NAME=`$dummy $dummyarg` &&
+ { echo "$SYSTEM_NAME"; exit; }
+ echo mips-mips-riscos${UNAME_RELEASE}
+ exit ;;
+ Motorola:PowerMAX_OS:*:*)
+ echo powerpc-motorola-powermax
+ exit ;;
+ Motorola:*:4.3:PL8-*)
+ echo powerpc-harris-powermax
+ exit ;;
+ Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+ echo powerpc-harris-powermax
+ exit ;;
+ Night_Hawk:Power_UNIX:*:*)
+ echo powerpc-harris-powerunix
+ exit ;;
+ m88k:CX/UX:7*:*)
+ echo m88k-harris-cxux7
+ exit ;;
+ m88k:*:4*:R4*)
+ echo m88k-motorola-sysv4
+ exit ;;
+ m88k:*:3*:R3*)
+ echo m88k-motorola-sysv3
+ exit ;;
+ AViiON:dgux:*:*)
+ # DG/UX returns AViiON for all architectures
+ UNAME_PROCESSOR=`/usr/bin/uname -p`
+ if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
+ then
+ if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
+ [ ${TARGET_BINARY_INTERFACE}x = x ]
+ then
+ echo m88k-dg-dgux${UNAME_RELEASE}
+ else
+ echo m88k-dg-dguxbcs${UNAME_RELEASE}
+ fi
+ else
+ echo i586-dg-dgux${UNAME_RELEASE}
+ fi
+ exit ;;
+ M88*:DolphinOS:*:*) # DolphinOS (SVR3)
+ echo m88k-dolphin-sysv3
+ exit ;;
+ M88*:*:R3*:*)
+ # Delta 88k system running SVR3
+ echo m88k-motorola-sysv3
+ exit ;;
+ XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+ echo m88k-tektronix-sysv3
+ exit ;;
+ Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+ echo m68k-tektronix-bsd
+ exit ;;
+ *:IRIX*:*:*)
+ echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
+ exit ;;
+ ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+ echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id
+ exit ;; # Note that: echo "'`uname -s`'" gives 'AIX '
+ i*86:AIX:*:*)
+ echo i386-ibm-aix
+ exit ;;
+ ia64:AIX:*:*)
+ if [ -x /usr/bin/oslevel ] ; then
+ IBM_REV=`/usr/bin/oslevel`
+ else
+ IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+ fi
+ echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
+ exit ;;
+ *:AIX:2:3)
+ if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+ eval $set_cc_for_build
+ sed 's/^ //' << EOF >$dummy.c
+ #include <sys/systemcfg.h>
+
+ main()
+ {
+ if (!__power_pc())
+ exit(1);
+ puts("powerpc-ibm-aix3.2.5");
+ exit(0);
+ }
+EOF
+ if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy`
+ then
+ echo "$SYSTEM_NAME"
+ else
+ echo rs6000-ibm-aix3.2.5
+ fi
+ elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+ echo rs6000-ibm-aix3.2.4
+ else
+ echo rs6000-ibm-aix3.2
+ fi
+ exit ;;
+ *:AIX:*:[4567])
+ IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+ if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
+ IBM_ARCH=rs6000
+ else
+ IBM_ARCH=powerpc
+ fi
+ if [ -x /usr/bin/oslevel ] ; then
+ IBM_REV=`/usr/bin/oslevel`
+ else
+ IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+ fi
+ echo ${IBM_ARCH}-ibm-aix${IBM_REV}
+ exit ;;
+ *:AIX:*:*)
+ echo rs6000-ibm-aix
+ exit ;;
+ ibmrt:4.4BSD:*|romp-ibm:BSD:*)
+ echo romp-ibm-bsd4.4
+ exit ;;
+ ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and
+ echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to
+ exit ;; # report: romp-ibm BSD 4.3
+ *:BOSX:*:*)
+ echo rs6000-bull-bosx
+ exit ;;
+ DPX/2?00:B.O.S.:*:*)
+ echo m68k-bull-sysv3
+ exit ;;
+ 9000/[34]??:4.3bsd:1.*:*)
+ echo m68k-hp-bsd
+ exit ;;
+ hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+ echo m68k-hp-bsd4.4
+ exit ;;
+ 9000/[34678]??:HP-UX:*:*)
+ HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+ case "${UNAME_MACHINE}" in
+ 9000/31? ) HP_ARCH=m68000 ;;
+ 9000/[34]?? ) HP_ARCH=m68k ;;
+ 9000/[678][0-9][0-9])
+ if [ -x /usr/bin/getconf ]; then
+ sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+ sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+ case "${sc_cpu_version}" in
+ 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
+ 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
+ 532) # CPU_PA_RISC2_0
+ case "${sc_kernel_bits}" in
+ 32) HP_ARCH="hppa2.0n" ;;
+ 64) HP_ARCH="hppa2.0w" ;;
+ '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20
+ esac ;;
+ esac
+ fi
+ if [ "${HP_ARCH}" = "" ]; then
+ eval $set_cc_for_build
+ sed 's/^ //' << EOF >$dummy.c
+
+ #define _HPUX_SOURCE
+ #include <stdlib.h>
+ #include <unistd.h>
+
+ int main ()
+ {
+ #if defined(_SC_KERNEL_BITS)
+ long bits = sysconf(_SC_KERNEL_BITS);
+ #endif
+ long cpu = sysconf (_SC_CPU_VERSION);
+
+ switch (cpu)
+ {
+ case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+ case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+ case CPU_PA_RISC2_0:
+ #if defined(_SC_KERNEL_BITS)
+ switch (bits)
+ {
+ case 64: puts ("hppa2.0w"); break;
+ case 32: puts ("hppa2.0n"); break;
+ default: puts ("hppa2.0"); break;
+ } break;
+ #else /* !defined(_SC_KERNEL_BITS) */
+ puts ("hppa2.0"); break;
+ #endif
+ default: puts ("hppa1.0"); break;
+ }
+ exit (0);
+ }
+EOF
+ (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
+ test -z "$HP_ARCH" && HP_ARCH=hppa
+ fi ;;
+ esac
+ if [ ${HP_ARCH} = "hppa2.0w" ]
+ then
+ eval $set_cc_for_build
+
+ # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
+ # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler
+ # generating 64-bit code. GNU and HP use different nomenclature:
+ #
+ # $ CC_FOR_BUILD=cc ./config.guess
+ # => hppa2.0w-hp-hpux11.23
+ # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+ # => hppa64-hp-hpux11.23
+
+ if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
+ grep -q __LP64__
+ then
+ HP_ARCH="hppa2.0w"
+ else
+ HP_ARCH="hppa64"
+ fi
+ fi
+ echo ${HP_ARCH}-hp-hpux${HPUX_REV}
+ exit ;;
+ ia64:HP-UX:*:*)
+ HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+ echo ia64-hp-hpux${HPUX_REV}
+ exit ;;
+ 3050*:HI-UX:*:*)
+ eval $set_cc_for_build
+ sed 's/^ //' << EOF >$dummy.c
+ #include <unistd.h>
+ int
+ main ()
+ {
+ long cpu = sysconf (_SC_CPU_VERSION);
+ /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+ true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct
+ results, however. */
+ if (CPU_IS_PA_RISC (cpu))
+ {
+ switch (cpu)
+ {
+ case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+ case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+ case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+ default: puts ("hppa-hitachi-hiuxwe2"); break;
+ }
+ }
+ else if (CPU_IS_HP_MC68K (cpu))
+ puts ("m68k-hitachi-hiuxwe2");
+ else puts ("unknown-hitachi-hiuxwe2");
+ exit (0);
+ }
+EOF
+ $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` &&
+ { echo "$SYSTEM_NAME"; exit; }
+ echo unknown-hitachi-hiuxwe2
+ exit ;;
+ 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
+ echo hppa1.1-hp-bsd
+ exit ;;
+ 9000/8??:4.3bsd:*:*)
+ echo hppa1.0-hp-bsd
+ exit ;;
+ *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+ echo hppa1.0-hp-mpeix
+ exit ;;
+ hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
+ echo hppa1.1-hp-osf
+ exit ;;
+ hp8??:OSF1:*:*)
+ echo hppa1.0-hp-osf
+ exit ;;
+ i*86:OSF1:*:*)
+ if [ -x /usr/sbin/sysversion ] ; then
+ echo ${UNAME_MACHINE}-unknown-osf1mk
+ else
+ echo ${UNAME_MACHINE}-unknown-osf1
+ fi
+ exit ;;
+ parisc*:Lites*:*:*)
+ echo hppa1.1-hp-lites
+ exit ;;
+ C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+ echo c1-convex-bsd
+ exit ;;
+ C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+ if getsysinfo -f scalar_acc
+ then echo c32-convex-bsd
+ else echo c2-convex-bsd
+ fi
+ exit ;;
+ C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+ echo c34-convex-bsd
+ exit ;;
+ C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+ echo c38-convex-bsd
+ exit ;;
+ C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+ echo c4-convex-bsd
+ exit ;;
+ CRAY*Y-MP:*:*:*)
+ echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*[A-Z]90:*:*:*)
+ echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
+ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+ -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*TS:*:*:*)
+ echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*T3E:*:*:*)
+ echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ CRAY*SV1:*:*:*)
+ echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ *:UNICOS/mp:*:*)
+ echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+ FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+ FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+ FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
+ echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+ exit ;;
+ 5000:UNIX_System_V:4.*:*)
+ FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+ FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
+ echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+ exit ;;
+ i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+ echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
+ exit ;;
+ sparc*:BSD/OS:*:*)
+ echo sparc-unknown-bsdi${UNAME_RELEASE}
+ exit ;;
+ *:BSD/OS:*:*)
+ echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
+ exit ;;
+ *:FreeBSD:*:*)
+ case ${UNAME_MACHINE} in
+ pc98)
+ echo i386-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+ amd64)
+ echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+ *)
+ echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+ esac
+ exit ;;
+ i*:CYGWIN*:*)
+ echo ${UNAME_MACHINE}-pc-cygwin
+ exit ;;
+ *:MINGW*:*)
+ echo ${UNAME_MACHINE}-pc-mingw32
+ exit ;;
+ i*:windows32*:*)
+ # uname -m includes "-pc" on this system.
+ echo ${UNAME_MACHINE}-mingw32
+ exit ;;
+ i*:PW*:*)
+ echo ${UNAME_MACHINE}-pc-pw32
+ exit ;;
+ *:Interix*:*)
+ case ${UNAME_MACHINE} in
+ x86)
+ echo i586-pc-interix${UNAME_RELEASE}
+ exit ;;
+ authenticamd | genuineintel | EM64T)
+ echo x86_64-unknown-interix${UNAME_RELEASE}
+ exit ;;
+ IA64)
+ echo ia64-unknown-interix${UNAME_RELEASE}
+ exit ;;
+ esac ;;
+ [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
+ echo i${UNAME_MACHINE}-pc-mks
+ exit ;;
+ 8664:Windows_NT:*)
+ echo x86_64-pc-mks
+ exit ;;
+ i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
+ # How do we know it's Interix rather than the generic POSIX subsystem?
+ # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
+ # UNAME_MACHINE based on the output of uname instead of i386?
+ echo i586-pc-interix
+ exit ;;
+ i*:UWIN*:*)
+ echo ${UNAME_MACHINE}-pc-uwin
+ exit ;;
+ amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
+ echo x86_64-unknown-cygwin
+ exit ;;
+ p*:CYGWIN*:*)
+ echo powerpcle-unknown-cygwin
+ exit ;;
+ prep*:SunOS:5.*:*)
+ echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+ exit ;;
+ *:GNU:*:*)
+ # the GNU system
+ echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
+ exit ;;
+ *:GNU/*:*:*)
+ # other systems with GNU libc and userland
+ echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
+ exit ;;
+ i*86:Minix:*:*)
+ echo ${UNAME_MACHINE}-pc-minix
+ exit ;;
+ alpha:Linux:*:*)
+ case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
+ EV5) UNAME_MACHINE=alphaev5 ;;
+ EV56) UNAME_MACHINE=alphaev56 ;;
+ PCA56) UNAME_MACHINE=alphapca56 ;;
+ PCA57) UNAME_MACHINE=alphapca56 ;;
+ EV6) UNAME_MACHINE=alphaev6 ;;
+ EV67) UNAME_MACHINE=alphaev67 ;;
+ EV68*) UNAME_MACHINE=alphaev68 ;;
+ esac
+ objdump --private-headers /bin/sh | grep -q ld.so.1
+ if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
+ echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC}
+ exit ;;
+ arm*:Linux:*:*)
+ eval $set_cc_for_build
+ if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ARM_EABI__
+ then
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ else
+ echo ${UNAME_MACHINE}-unknown-linux-gnueabi
+ fi
+ exit ;;
+ avr32*:Linux:*:*)
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ exit ;;
+ cris:Linux:*:*)
+ echo cris-axis-linux-gnu
+ exit ;;
+ crisv32:Linux:*:*)
+ echo crisv32-axis-linux-gnu
+ exit ;;
+ frv:Linux:*:*)
+ echo frv-unknown-linux-gnu
+ exit ;;
+ i*86:Linux:*:*)
+ LIBC=gnu
+ eval $set_cc_for_build
+ sed 's/^ //' << EOF >$dummy.c
+ #ifdef __dietlibc__
+ LIBC=dietlibc
+ #endif
+EOF
+ eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'`
+ echo "${UNAME_MACHINE}-pc-linux-${LIBC}"
+ exit ;;
+ ia64:Linux:*:*)
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ exit ;;
+ m32r*:Linux:*:*)
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ exit ;;
+ m68*:Linux:*:*)
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ exit ;;
+ mips:Linux:*:* | mips64:Linux:*:*)
+ eval $set_cc_for_build
+ sed 's/^ //' << EOF >$dummy.c
+ #undef CPU
+ #undef ${UNAME_MACHINE}
+ #undef ${UNAME_MACHINE}el
+ #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+ CPU=${UNAME_MACHINE}el
+ #else
+ #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+ CPU=${UNAME_MACHINE}
+ #else
+ CPU=
+ #endif
+ #endif
+EOF
+ eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'`
+ test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
+ ;;
+ or32:Linux:*:*)
+ echo or32-unknown-linux-gnu
+ exit ;;
+ padre:Linux:*:*)
+ echo sparc-unknown-linux-gnu
+ exit ;;
+ parisc64:Linux:*:* | hppa64:Linux:*:*)
+ echo hppa64-unknown-linux-gnu
+ exit ;;
+ parisc:Linux:*:* | hppa:Linux:*:*)
+ # Look for CPU level
+ case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+ PA7*) echo hppa1.1-unknown-linux-gnu ;;
+ PA8*) echo hppa2.0-unknown-linux-gnu ;;
+ *) echo hppa-unknown-linux-gnu ;;
+ esac
+ exit ;;
+ ppc64:Linux:*:*)
+ echo powerpc64-unknown-linux-gnu
+ exit ;;
+ ppc:Linux:*:*)
+ echo powerpc-unknown-linux-gnu
+ exit ;;
+ s390:Linux:*:* | s390x:Linux:*:*)
+ echo ${UNAME_MACHINE}-ibm-linux
+ exit ;;
+ sh64*:Linux:*:*)
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ exit ;;
+ sh*:Linux:*:*)
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ exit ;;
+ sparc:Linux:*:* | sparc64:Linux:*:*)
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ exit ;;
+ tile*:Linux:*:*)
+ echo ${UNAME_MACHINE}-tilera-linux-gnu
+ exit ;;
+ vax:Linux:*:*)
+ echo ${UNAME_MACHINE}-dec-linux-gnu
+ exit ;;
+ x86_64:Linux:*:*)
+ echo x86_64-unknown-linux-gnu
+ exit ;;
+ xtensa*:Linux:*:*)
+ echo ${UNAME_MACHINE}-unknown-linux-gnu
+ exit ;;
+ i*86:DYNIX/ptx:4*:*)
+ # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+ # earlier versions are messed up and put the nodename in both
+ # sysname and nodename.
+ echo i386-sequent-sysv4
+ exit ;;
+ i*86:UNIX_SV:4.2MP:2.*)
+ # Unixware is an offshoot of SVR4, but it has its own version
+ # number series starting with 2...
+ # I am not positive that other SVR4 systems won't match this,
+ # I just have to hope. -- rms.
+ # Use sysv4.2uw... so that sysv4* matches it.
+ echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
+ exit ;;
+ i*86:OS/2:*:*)
+ # If we were able to find `uname', then EMX Unix compatibility
+ # is probably installed.
+ echo ${UNAME_MACHINE}-pc-os2-emx
+ exit ;;
+ i*86:XTS-300:*:STOP)
+ echo ${UNAME_MACHINE}-unknown-stop
+ exit ;;
+ i*86:atheos:*:*)
+ echo ${UNAME_MACHINE}-unknown-atheos
+ exit ;;
+ i*86:syllable:*:*)
+ echo ${UNAME_MACHINE}-pc-syllable
+ exit ;;
+ i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
+ echo i386-unknown-lynxos${UNAME_RELEASE}
+ exit ;;
+ i*86:*DOS:*:*)
+ echo ${UNAME_MACHINE}-pc-msdosdjgpp
+ exit ;;
+ i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
+ UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
+ if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+ echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
+ else
+ echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
+ fi
+ exit ;;
+ i*86:*:5:[678]*)
+ # UnixWare 7.x, OpenUNIX and OpenServer 6.
+ case `/bin/uname -X | grep "^Machine"` in
+ *486*) UNAME_MACHINE=i486 ;;
+ *Pentium) UNAME_MACHINE=i586 ;;
+ *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+ esac
+ echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
+ exit ;;
+ i*86:*:3.2:*)
+ if test -f /usr/options/cb.name; then
+ UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+ echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
+ elif /bin/uname -X 2>/dev/null >/dev/null ; then
+ UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+ (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+ (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+ && UNAME_MACHINE=i586
+ (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+ && UNAME_MACHINE=i686
+ (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+ && UNAME_MACHINE=i686
+ echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
+ else
+ echo ${UNAME_MACHINE}-pc-sysv32
+ fi
+ exit ;;
+ pc:*:*:*)
+ # Left here for compatibility:
+ # uname -m prints for DJGPP always 'pc', but it prints nothing about
+ # the processor, so we play safe by assuming i586.
+ # Note: whatever this is, it MUST be the same as what config.sub
+ # prints for the "djgpp" host, or else GDB configury will decide that
+ # this is a cross-build.
+ echo i586-pc-msdosdjgpp
+ exit ;;
+ Intel:Mach:3*:*)
+ echo i386-pc-mach3
+ exit ;;
+ paragon:*:*:*)
+ echo i860-intel-osf1
+ exit ;;
+ i860:*:4.*:*) # i860-SVR4
+ if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+ echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
+ else # Add other i860-SVR4 vendors below as they are discovered.
+ echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4
+ fi
+ exit ;;
+ mini*:CTIX:SYS*5:*)
+ # "miniframe"
+ echo m68010-convergent-sysv
+ exit ;;
+ mc68k:UNIX:SYSTEM5:3.51m)
+ echo m68k-convergent-sysv
+ exit ;;
+ M680?0:D-NIX:5.3:*)
+ echo m68k-diab-dnix
+ exit ;;
+ M68*:*:R3V[5678]*:*)
+ test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
+ 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+ OS_REL=''
+ test -r /etc/.relid \
+ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+ 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4; exit; } ;;
+ NCR*:*:4.2:* | MPRAS*:*:4.2:*)
+ OS_REL='.3'
+ test -r /etc/.relid \
+ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+ && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+ && { echo i586-ncr-sysv4.3${OS_REL}; exit; }
+ /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
+ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+ m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+ echo m68k-unknown-lynxos${UNAME_RELEASE}
+ exit ;;
+ mc68030:UNIX_System_V:4.*:*)
+ echo m68k-atari-sysv4
+ exit ;;
+ TSUNAMI:LynxOS:2.*:*)
+ echo sparc-unknown-lynxos${UNAME_RELEASE}
+ exit ;;
+ rs6000:LynxOS:2.*:*)
+ echo rs6000-unknown-lynxos${UNAME_RELEASE}
+ exit ;;
+ PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
+ echo powerpc-unknown-lynxos${UNAME_RELEASE}
+ exit ;;
+ SM[BE]S:UNIX_SV:*:*)
+ echo mips-dde-sysv${UNAME_RELEASE}
+ exit ;;
+ RM*:ReliantUNIX-*:*:*)
+ echo mips-sni-sysv4
+ exit ;;
+ RM*:SINIX-*:*:*)
+ echo mips-sni-sysv4
+ exit ;;
+ *:SINIX-*:*:*)
+ if uname -p 2>/dev/null >/dev/null ; then
+ UNAME_MACHINE=`(uname -p) 2>/dev/null`
+ echo ${UNAME_MACHINE}-sni-sysv4
+ else
+ echo ns32k-sni-sysv
+ fi
+ exit ;;
+ PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+ # says <Richard.M.Bartel@ccMail.Census.GOV>
+ echo i586-unisys-sysv4
+ exit ;;
+ *:UNIX_System_V:4*:FTX*)
+ # From Gerald Hewes <hewes@openmarket.com>.
+ # How about differentiating between stratus architectures? -djm
+ echo hppa1.1-stratus-sysv4
+ exit ;;
+ *:*:*:FTX*)
+ # From seanf@swdc.stratus.com.
+ echo i860-stratus-sysv4
+ exit ;;
+ i*86:VOS:*:*)
+ # From Paul.Green@stratus.com.
+ echo ${UNAME_MACHINE}-stratus-vos
+ exit ;;
+ *:VOS:*:*)
+ # From Paul.Green@stratus.com.
+ echo hppa1.1-stratus-vos
+ exit ;;
+ mc68*:A/UX:*:*)
+ echo m68k-apple-aux${UNAME_RELEASE}
+ exit ;;
+ news*:NEWS-OS:6*:*)
+ echo mips-sony-newsos6
+ exit ;;
+ R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+ if [ -d /usr/nec ]; then
+ echo mips-nec-sysv${UNAME_RELEASE}
+ else
+ echo mips-unknown-sysv${UNAME_RELEASE}
+ fi
+ exit ;;
+ BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only.
+ echo powerpc-be-beos
+ exit ;;
+ BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only.
+ echo powerpc-apple-beos
+ exit ;;
+ BePC:BeOS:*:*) # BeOS running on Intel PC compatible.
+ echo i586-pc-beos
+ exit ;;
+ BePC:Haiku:*:*) # Haiku running on Intel PC compatible.
+ echo i586-pc-haiku
+ exit ;;
+ SX-4:SUPER-UX:*:*)
+ echo sx4-nec-superux${UNAME_RELEASE}
+ exit ;;
+ SX-5:SUPER-UX:*:*)
+ echo sx5-nec-superux${UNAME_RELEASE}
+ exit ;;
+ SX-6:SUPER-UX:*:*)
+ echo sx6-nec-superux${UNAME_RELEASE}
+ exit ;;
+ SX-7:SUPER-UX:*:*)
+ echo sx7-nec-superux${UNAME_RELEASE}
+ exit ;;
+ SX-8:SUPER-UX:*:*)
+ echo sx8-nec-superux${UNAME_RELEASE}
+ exit ;;
+ SX-8R:SUPER-UX:*:*)
+ echo sx8r-nec-superux${UNAME_RELEASE}
+ exit ;;
+ Power*:Rhapsody:*:*)
+ echo powerpc-apple-rhapsody${UNAME_RELEASE}
+ exit ;;
+ *:Rhapsody:*:*)
+ echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
+ exit ;;
+ *:Darwin:*:*)
+ UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
+ case $UNAME_PROCESSOR in
+ i386)
+ eval $set_cc_for_build
+ if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+ if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
+ (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+ grep IS_64BIT_ARCH >/dev/null
+ then
+ UNAME_PROCESSOR="x86_64"
+ fi
+ fi ;;
+ unknown) UNAME_PROCESSOR=powerpc ;;
+ esac
+ echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
+ exit ;;
+ *:procnto*:*:* | *:QNX:[0123456789]*:*)
+ UNAME_PROCESSOR=`uname -p`
+ if test "$UNAME_PROCESSOR" = "x86"; then
+ UNAME_PROCESSOR=i386
+ UNAME_MACHINE=pc
+ fi
+ echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
+ exit ;;
+ *:QNX:*:4*)
+ echo i386-pc-qnx
+ exit ;;
+ NSE-?:NONSTOP_KERNEL:*:*)
+ echo nse-tandem-nsk${UNAME_RELEASE}
+ exit ;;
+ NSR-?:NONSTOP_KERNEL:*:*)
+ echo nsr-tandem-nsk${UNAME_RELEASE}
+ exit ;;
+ *:NonStop-UX:*:*)
+ echo mips-compaq-nonstopux
+ exit ;;
+ BS2000:POSIX*:*:*)
+ echo bs2000-siemens-sysv
+ exit ;;
+ DS/*:UNIX_System_V:*:*)
+ echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
+ exit ;;
+ *:Plan9:*:*)
+ # "uname -m" is not consistent, so use $cputype instead. 386
+ # is converted to i386 for consistency with other x86
+ # operating systems.
+ if test "$cputype" = "386"; then
+ UNAME_MACHINE=i386
+ else
+ UNAME_MACHINE="$cputype"
+ fi
+ echo ${UNAME_MACHINE}-unknown-plan9
+ exit ;;
+ *:TOPS-10:*:*)
+ echo pdp10-unknown-tops10
+ exit ;;
+ *:TENEX:*:*)
+ echo pdp10-unknown-tenex
+ exit ;;
+ KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+ echo pdp10-dec-tops20
+ exit ;;
+ XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+ echo pdp10-xkl-tops20
+ exit ;;
+ *:TOPS-20:*:*)
+ echo pdp10-unknown-tops20
+ exit ;;
+ *:ITS:*:*)
+ echo pdp10-unknown-its
+ exit ;;
+ SEI:*:*:SEIUX)
+ echo mips-sei-seiux${UNAME_RELEASE}
+ exit ;;
+ *:DragonFly:*:*)
+ echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
+ exit ;;
+ *:*VMS:*:*)
+ UNAME_MACHINE=`(uname -p) 2>/dev/null`
+ case "${UNAME_MACHINE}" in
+ A*) echo alpha-dec-vms ; exit ;;
+ I*) echo ia64-dec-vms ; exit ;;
+ V*) echo vax-dec-vms ; exit ;;
+ esac ;;
+ *:XENIX:*:SysV)
+ echo i386-pc-xenix
+ exit ;;
+ i*86:skyos:*:*)
+ echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
+ exit ;;
+ i*86:rdos:*:*)
+ echo ${UNAME_MACHINE}-pc-rdos
+ exit ;;
+ i*86:AROS:*:*)
+ echo ${UNAME_MACHINE}-pc-aros
+ exit ;;
+esac
+
+#echo '(No uname command or uname output not recognized.)' 1>&2
+#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
+
+eval $set_cc_for_build
+cat >$dummy.c <<EOF
+#ifdef _SEQUENT_
+# include <sys/types.h>
+# include <sys/utsname.h>
+#endif
+main ()
+{
+#if defined (sony)
+#if defined (MIPSEB)
+ /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed,
+ I don't know.... */
+ printf ("mips-sony-bsd\n"); exit (0);
+#else
+#include <sys/param.h>
+ printf ("m68k-sony-newsos%s\n",
+#ifdef NEWSOS4
+ "4"
+#else
+ ""
+#endif
+ ); exit (0);
+#endif
+#endif
+
+#if defined (__arm) && defined (__acorn) && defined (__unix)
+ printf ("arm-acorn-riscix\n"); exit (0);
+#endif
+
+#if defined (hp300) && !defined (hpux)
+ printf ("m68k-hp-bsd\n"); exit (0);
+#endif
+
+#if defined (NeXT)
+#if !defined (__ARCHITECTURE__)
+#define __ARCHITECTURE__ "m68k"
+#endif
+ int version;
+ version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+ if (version < 4)
+ printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+ else
+ printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+ exit (0);
+#endif
+
+#if defined (MULTIMAX) || defined (n16)
+#if defined (UMAXV)
+ printf ("ns32k-encore-sysv\n"); exit (0);
+#else
+#if defined (CMU)
+ printf ("ns32k-encore-mach\n"); exit (0);
+#else
+ printf ("ns32k-encore-bsd\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (__386BSD__)
+ printf ("i386-pc-bsd\n"); exit (0);
+#endif
+
+#if defined (sequent)
+#if defined (i386)
+ printf ("i386-sequent-dynix\n"); exit (0);
+#endif
+#if defined (ns32000)
+ printf ("ns32k-sequent-dynix\n"); exit (0);
+#endif
+#endif
+
+#if defined (_SEQUENT_)
+ struct utsname un;
+
+ uname(&un);
+
+ if (strncmp(un.version, "V2", 2) == 0) {
+ printf ("i386-sequent-ptx2\n"); exit (0);
+ }
+ if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+ printf ("i386-sequent-ptx1\n"); exit (0);
+ }
+ printf ("i386-sequent-ptx\n"); exit (0);
+
+#endif
+
+#if defined (vax)
+# if !defined (ultrix)
+# include <sys/param.h>
+# if defined (BSD)
+# if BSD == 43
+ printf ("vax-dec-bsd4.3\n"); exit (0);
+# else
+# if BSD == 199006
+ printf ("vax-dec-bsd4.3reno\n"); exit (0);
+# else
+ printf ("vax-dec-bsd\n"); exit (0);
+# endif
+# endif
+# else
+ printf ("vax-dec-bsd\n"); exit (0);
+# endif
+# else
+ printf ("vax-dec-ultrix\n"); exit (0);
+# endif
+#endif
+
+#if defined (alliant) && defined (i860)
+ printf ("i860-alliant-bsd\n"); exit (0);
+#endif
+
+ exit (1);
+}
+EOF
+
+$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
+ { echo "$SYSTEM_NAME"; exit; }
+
+# Apollos put the system type in the environment.
+
+test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
+
+# Convex versions that predate uname can use getsysinfo(1)
+
+if [ -x /usr/convex/getsysinfo ]
+then
+ case `getsysinfo -f cpu_type` in
+ c1*)
+ echo c1-convex-bsd
+ exit ;;
+ c2*)
+ if getsysinfo -f scalar_acc
+ then echo c32-convex-bsd
+ else echo c2-convex-bsd
+ fi
+ exit ;;
+ c34*)
+ echo c34-convex-bsd
+ exit ;;
+ c38*)
+ echo c38-convex-bsd
+ exit ;;
+ c4*)
+ echo c4-convex-bsd
+ exit ;;
+ esac
+fi
+
+cat >&2 <<EOF
+$0: unable to guess system type
+
+This script, last modified $timestamp, has failed to recognize
+the operating system you are using. It is advised that you
+download the most up to date version of the config scripts from
+
+ http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
+and
+ http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
+
+If the version you run ($0) is already up to date, please
+send the following data and any information you think might be
+pertinent to <config-patches@gnu.org> in order to provide the needed
+information to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo = `(hostinfo) 2>/dev/null`
+/bin/universe = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = ${UNAME_MACHINE}
+UNAME_RELEASE = ${UNAME_RELEASE}
+UNAME_SYSTEM = ${UNAME_SYSTEM}
+UNAME_VERSION = ${UNAME_VERSION}
+EOF
+
+exit 1
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/build/config.sub b/build/config.sub
new file mode 100755
index 0000000..320e303
--- /dev/null
+++ b/build/config.sub
@@ -0,0 +1,1739 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+# Free Software Foundation, Inc.
+
+timestamp='2010-09-11'
+
+# This file is (in principle) common to ALL GNU software.
+# The presence of a machine in this file suggests that SOME GNU software
+# can handle that machine. It does not imply ALL GNU software can.
+#
+# This file is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+
+# Please send patches to <config-patches@gnu.org>. Submit a context
+# diff and a properly formatted GNU ChangeLog entry.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# You can get the latest version of this script from:
+# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support. The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS
+ $0 [OPTION] ALIAS
+
+Canonicalize a configuration name.
+
+Operation modes:
+ -h, --help print this help, then exit
+ -t, --time-stamp print date of last modification, then exit
+ -v, --version print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free
+Software Foundation, Inc.
+
+This is free software; see the source for copying conditions. There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+ case $1 in
+ --time-stamp | --time* | -t )
+ echo "$timestamp" ; exit ;;
+ --version | -v )
+ echo "$version" ; exit ;;
+ --help | --h* | -h )
+ echo "$usage"; exit ;;
+ -- ) # Stop option processing
+ shift; break ;;
+ - ) # Use stdin as input.
+ break ;;
+ -* )
+ echo "$me: invalid option $1$help"
+ exit 1 ;;
+
+ *local*)
+ # First pass through any local machine types.
+ echo $1
+ exit ;;
+
+ * )
+ break ;;
+ esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+ exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+ exit 1;;
+esac
+
+# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
+# Here we must recognize all the valid KERNEL-OS combinations.
+maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
+case $maybe_os in
+ nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \
+ linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \
+ knetbsd*-gnu* | netbsd*-gnu* | \
+ kopensolaris*-gnu* | \
+ storm-chaos* | os2-emx* | rtmk-nova*)
+ os=-$maybe_os
+ basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
+ ;;
+ *)
+ basic_machine=`echo $1 | sed 's/-[^-]*$//'`
+ if [ $basic_machine != $1 ]
+ then os=`echo $1 | sed 's/.*-/-/'`
+ else os=; fi
+ ;;
+esac
+
+### Let's recognize common machines as not being operating systems so
+### that things like config.sub decstation-3100 work. We also
+### recognize some manufacturers as not being operating systems, so we
+### can provide default operating systems below.
+case $os in
+ -sun*os*)
+ # Prevent following clause from handling this invalid input.
+ ;;
+ -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
+ -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
+ -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
+ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
+ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
+ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
+ -apple | -axis | -knuth | -cray | -microblaze)
+ os=
+ basic_machine=$1
+ ;;
+ -bluegene*)
+ os=-cnk
+ ;;
+ -sim | -cisco | -oki | -wec | -winbond)
+ os=
+ basic_machine=$1
+ ;;
+ -scout)
+ ;;
+ -wrs)
+ os=-vxworks
+ basic_machine=$1
+ ;;
+ -chorusos*)
+ os=-chorusos
+ basic_machine=$1
+ ;;
+ -chorusrdb)
+ os=-chorusrdb
+ basic_machine=$1
+ ;;
+ -hiux*)
+ os=-hiuxwe2
+ ;;
+ -sco6)
+ os=-sco5v6
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco5)
+ os=-sco3.2v5
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco4)
+ os=-sco3.2v4
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco3.2.[4-9]*)
+ os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco3.2v[4-9]*)
+ # Don't forget version if it is 3.2v4 or newer.
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco5v6*)
+ # Don't forget version if it is 3.2v4 or newer.
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -sco*)
+ os=-sco3.2v2
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -udk*)
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -isc)
+ os=-isc2.2
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -clix*)
+ basic_machine=clipper-intergraph
+ ;;
+ -isc*)
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+ ;;
+ -lynx*)
+ os=-lynxos
+ ;;
+ -ptx*)
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
+ ;;
+ -windowsnt*)
+ os=`echo $os | sed -e 's/windowsnt/winnt/'`
+ ;;
+ -psos*)
+ os=-psos
+ ;;
+ -mint | -mint[0-9]*)
+ basic_machine=m68k-atari
+ os=-mint
+ ;;
+esac
+
+# Decode aliases for certain CPU-COMPANY combinations.
+case $basic_machine in
+ # Recognize the basic CPU types without company name.
+ # Some are omitted here because they have special meanings below.
+ 1750a | 580 \
+ | a29k \
+ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
+ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
+ | am33_2.0 \
+ | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \
+ | bfin \
+ | c4x | clipper \
+ | d10v | d30v | dlx | dsp16xx \
+ | fido | fr30 | frv \
+ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+ | i370 | i860 | i960 | ia64 \
+ | ip2k | iq2000 \
+ | lm32 \
+ | m32c | m32r | m32rle | m68000 | m68k | m88k \
+ | maxq | mb | microblaze | mcore | mep | metag \
+ | mips | mipsbe | mipseb | mipsel | mipsle \
+ | mips16 \
+ | mips64 | mips64el \
+ | mips64octeon | mips64octeonel \
+ | mips64orion | mips64orionel \
+ | mips64r5900 | mips64r5900el \
+ | mips64vr | mips64vrel \
+ | mips64vr4100 | mips64vr4100el \
+ | mips64vr4300 | mips64vr4300el \
+ | mips64vr5000 | mips64vr5000el \
+ | mips64vr5900 | mips64vr5900el \
+ | mipsisa32 | mipsisa32el \
+ | mipsisa32r2 | mipsisa32r2el \
+ | mipsisa64 | mipsisa64el \
+ | mipsisa64r2 | mipsisa64r2el \
+ | mipsisa64sb1 | mipsisa64sb1el \
+ | mipsisa64sr71k | mipsisa64sr71kel \
+ | mipstx39 | mipstx39el \
+ | mn10200 | mn10300 \
+ | moxie \
+ | mt \
+ | msp430 \
+ | nds32 | nds32le | nds32be \
+ | nios | nios2 \
+ | ns16k | ns32k \
+ | or32 \
+ | pdp10 | pdp11 | pj | pjl \
+ | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \
+ | pyramid \
+ | rx \
+ | score \
+ | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \
+ | sh64 | sh64le \
+ | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \
+ | sparcv8 | sparcv9 | sparcv9b | sparcv9v \
+ | spu | strongarm \
+ | tahoe | thumb | tic4x | tic54x | tic55x | tic6x | tic80 | tron \
+ | ubicom32 \
+ | v850 | v850e \
+ | we32k \
+ | x86 | xc16x | xscale | xscalee[bl] | xstormy16 | xtensa \
+ | z8k | z80)
+ basic_machine=$basic_machine-unknown
+ ;;
+ c54x)
+ basic_machine=tic54x-unknown
+ ;;
+ c55x)
+ basic_machine=tic55x-unknown
+ ;;
+ c6x)
+ basic_machine=tic6x-unknown
+ ;;
+ m6811 | m68hc11 | m6812 | m68hc12 | picochip)
+ # Motorola 68HC11/12.
+ basic_machine=$basic_machine-unknown
+ os=-none
+ ;;
+ m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
+ ;;
+ ms1)
+ basic_machine=mt-unknown
+ ;;
+
+ # We use `pc' rather than `unknown'
+ # because (1) that's what they normally are, and
+ # (2) the word "unknown" tends to confuse beginning users.
+ i*86 | x86_64)
+ basic_machine=$basic_machine-pc
+ ;;
+ # Object if more than one company name word.
+ *-*-*)
+ echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+ exit 1
+ ;;
+ # Recognize the basic CPU types with company name.
+ 580-* \
+ | a29k-* \
+ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
+ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
+ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
+ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \
+ | avr-* | avr32-* \
+ | bfin-* | bs2000-* \
+ | c[123]* | c30-* | [cjt]90-* | c4x-* \
+ | clipper-* | craynv-* | cydra-* \
+ | d10v-* | d30v-* | dlx-* \
+ | elxsi-* \
+ | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \
+ | h8300-* | h8500-* \
+ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
+ | i*86-* | i860-* | i960-* | ia64-* \
+ | ip2k-* | iq2000-* \
+ | lm32-* \
+ | m32c-* | m32r-* | m32rle-* \
+ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
+ | m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \
+ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
+ | mips16-* \
+ | mips64-* | mips64el-* \
+ | mips64octeon-* | mips64octeonel-* \
+ | mips64orion-* | mips64orionel-* \
+ | mips64r5900-* | mips64r5900el-* \
+ | mips64vr-* | mips64vrel-* \
+ | mips64vr4100-* | mips64vr4100el-* \
+ | mips64vr4300-* | mips64vr4300el-* \
+ | mips64vr5000-* | mips64vr5000el-* \
+ | mips64vr5900-* | mips64vr5900el-* \
+ | mipsisa32-* | mipsisa32el-* \
+ | mipsisa32r2-* | mipsisa32r2el-* \
+ | mipsisa64-* | mipsisa64el-* \
+ | mipsisa64r2-* | mipsisa64r2el-* \
+ | mipsisa64sb1-* | mipsisa64sb1el-* \
+ | mipsisa64sr71k-* | mipsisa64sr71kel-* \
+ | mipstx39-* | mipstx39el-* \
+ | mmix-* \
+ | mt-* \
+ | msp430-* \
+ | nds32-* | nds32le-* | nds32be-* \
+ | nios-* | nios2-* \
+ | none-* | np1-* | ns16k-* | ns32k-* \
+ | orion-* \
+ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
+ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \
+ | pyramid-* \
+ | romp-* | rs6000-* | rx-* \
+ | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \
+ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
+ | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \
+ | sparclite-* \
+ | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | strongarm-* | sv1-* | sx?-* \
+ | tahoe-* | thumb-* \
+ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
+ | tile-* | tilegx-* \
+ | tron-* \
+ | ubicom32-* \
+ | v850-* | v850e-* | vax-* \
+ | we32k-* \
+ | x86-* | x86_64-* | xc16x-* | xps100-* | xscale-* | xscalee[bl]-* \
+ | xstormy16-* | xtensa*-* \
+ | ymp-* \
+ | z8k-* | z80-*)
+ ;;
+ # Recognize the basic CPU types without company name, with glob match.
+ xtensa*)
+ basic_machine=$basic_machine-unknown
+ ;;
+ # Recognize the various machine names and aliases which stand
+ # for a CPU type and a company and sometimes even an OS.
+ 386bsd)
+ basic_machine=i386-unknown
+ os=-bsd
+ ;;
+ 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+ basic_machine=m68000-att
+ ;;
+ 3b*)
+ basic_machine=we32k-att
+ ;;
+ a29khif)
+ basic_machine=a29k-amd
+ os=-udi
+ ;;
+ abacus)
+ basic_machine=abacus-unknown
+ ;;
+ adobe68k)
+ basic_machine=m68010-adobe
+ os=-scout
+ ;;
+ alliant | fx80)
+ basic_machine=fx80-alliant
+ ;;
+ altos | altos3068)
+ basic_machine=m68k-altos
+ ;;
+ am29k)
+ basic_machine=a29k-none
+ os=-bsd
+ ;;
+ amd64)
+ basic_machine=x86_64-pc
+ ;;
+ amd64-*)
+ basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ amdahl)
+ basic_machine=580-amdahl
+ os=-sysv
+ ;;
+ amiga | amiga-*)
+ basic_machine=m68k-unknown
+ ;;
+ amigaos | amigados)
+ basic_machine=m68k-unknown
+ os=-amigaos
+ ;;
+ amigaunix | amix)
+ basic_machine=m68k-unknown
+ os=-sysv4
+ ;;
+ apollo68)
+ basic_machine=m68k-apollo
+ os=-sysv
+ ;;
+ apollo68bsd)
+ basic_machine=m68k-apollo
+ os=-bsd
+ ;;
+ aros)
+ basic_machine=i386-pc
+ os=-aros
+ ;;
+ aux)
+ basic_machine=m68k-apple
+ os=-aux
+ ;;
+ balance)
+ basic_machine=ns32k-sequent
+ os=-dynix
+ ;;
+ blackfin)
+ basic_machine=bfin-unknown
+ os=-linux
+ ;;
+ blackfin-*)
+ basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'`
+ os=-linux
+ ;;
+ bluegene*)
+ basic_machine=powerpc-ibm
+ os=-cnk
+ ;;
+ c54x-*)
+ basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ c55x-*)
+ basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ c6x-*)
+ basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ c90)
+ basic_machine=c90-cray
+ os=-unicos
+ ;;
+ cegcc)
+ basic_machine=arm-unknown
+ os=-cegcc
+ ;;
+ convex-c1)
+ basic_machine=c1-convex
+ os=-bsd
+ ;;
+ convex-c2)
+ basic_machine=c2-convex
+ os=-bsd
+ ;;
+ convex-c32)
+ basic_machine=c32-convex
+ os=-bsd
+ ;;
+ convex-c34)
+ basic_machine=c34-convex
+ os=-bsd
+ ;;
+ convex-c38)
+ basic_machine=c38-convex
+ os=-bsd
+ ;;
+ cray | j90)
+ basic_machine=j90-cray
+ os=-unicos
+ ;;
+ craynv)
+ basic_machine=craynv-cray
+ os=-unicosmp
+ ;;
+ cr16)
+ basic_machine=cr16-unknown
+ os=-elf
+ ;;
+ crds | unos)
+ basic_machine=m68k-crds
+ ;;
+ crisv32 | crisv32-* | etraxfs*)
+ basic_machine=crisv32-axis
+ ;;
+ cris | cris-* | etrax*)
+ basic_machine=cris-axis
+ ;;
+ crx)
+ basic_machine=crx-unknown
+ os=-elf
+ ;;
+ da30 | da30-*)
+ basic_machine=m68k-da30
+ ;;
+ decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
+ basic_machine=mips-dec
+ ;;
+ decsystem10* | dec10*)
+ basic_machine=pdp10-dec
+ os=-tops10
+ ;;
+ decsystem20* | dec20*)
+ basic_machine=pdp10-dec
+ os=-tops20
+ ;;
+ delta | 3300 | motorola-3300 | motorola-delta \
+ | 3300-motorola | delta-motorola)
+ basic_machine=m68k-motorola
+ ;;
+ delta88)
+ basic_machine=m88k-motorola
+ os=-sysv3
+ ;;
+ dicos)
+ basic_machine=i686-pc
+ os=-dicos
+ ;;
+ djgpp)
+ basic_machine=i586-pc
+ os=-msdosdjgpp
+ ;;
+ dpx20 | dpx20-*)
+ basic_machine=rs6000-bull
+ os=-bosx
+ ;;
+ dpx2* | dpx2*-bull)
+ basic_machine=m68k-bull
+ os=-sysv3
+ ;;
+ ebmon29k)
+ basic_machine=a29k-amd
+ os=-ebmon
+ ;;
+ elxsi)
+ basic_machine=elxsi-elxsi
+ os=-bsd
+ ;;
+ encore | umax | mmax)
+ basic_machine=ns32k-encore
+ ;;
+ es1800 | OSE68k | ose68k | ose | OSE)
+ basic_machine=m68k-ericsson
+ os=-ose
+ ;;
+ fx2800)
+ basic_machine=i860-alliant
+ ;;
+ genix)
+ basic_machine=ns32k-ns
+ ;;
+ gmicro)
+ basic_machine=tron-gmicro
+ os=-sysv
+ ;;
+ go32)
+ basic_machine=i386-pc
+ os=-go32
+ ;;
+ h3050r* | hiux*)
+ basic_machine=hppa1.1-hitachi
+ os=-hiuxwe2
+ ;;
+ h8300hms)
+ basic_machine=h8300-hitachi
+ os=-hms
+ ;;
+ h8300xray)
+ basic_machine=h8300-hitachi
+ os=-xray
+ ;;
+ h8500hms)
+ basic_machine=h8500-hitachi
+ os=-hms
+ ;;
+ harris)
+ basic_machine=m88k-harris
+ os=-sysv3
+ ;;
+ hp300-*)
+ basic_machine=m68k-hp
+ ;;
+ hp300bsd)
+ basic_machine=m68k-hp
+ os=-bsd
+ ;;
+ hp300hpux)
+ basic_machine=m68k-hp
+ os=-hpux
+ ;;
+ hp3k9[0-9][0-9] | hp9[0-9][0-9])
+ basic_machine=hppa1.0-hp
+ ;;
+ hp9k2[0-9][0-9] | hp9k31[0-9])
+ basic_machine=m68000-hp
+ ;;
+ hp9k3[2-9][0-9])
+ basic_machine=m68k-hp
+ ;;
+ hp9k6[0-9][0-9] | hp6[0-9][0-9])
+ basic_machine=hppa1.0-hp
+ ;;
+ hp9k7[0-79][0-9] | hp7[0-79][0-9])
+ basic_machine=hppa1.1-hp
+ ;;
+ hp9k78[0-9] | hp78[0-9])
+ # FIXME: really hppa2.0-hp
+ basic_machine=hppa1.1-hp
+ ;;
+ hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+ # FIXME: really hppa2.0-hp
+ basic_machine=hppa1.1-hp
+ ;;
+ hp9k8[0-9][13679] | hp8[0-9][13679])
+ basic_machine=hppa1.1-hp
+ ;;
+ hp9k8[0-9][0-9] | hp8[0-9][0-9])
+ basic_machine=hppa1.0-hp
+ ;;
+ hppa-next)
+ os=-nextstep3
+ ;;
+ hppaosf)
+ basic_machine=hppa1.1-hp
+ os=-osf
+ ;;
+ hppro)
+ basic_machine=hppa1.1-hp
+ os=-proelf
+ ;;
+ i370-ibm* | ibm*)
+ basic_machine=i370-ibm
+ ;;
+# I'm not sure what "Sysv32" means. Should this be sysv3.2?
+ i*86v32)
+ basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+ os=-sysv32
+ ;;
+ i*86v4*)
+ basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+ os=-sysv4
+ ;;
+ i*86v)
+ basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+ os=-sysv
+ ;;
+ i*86sol2)
+ basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+ os=-solaris2
+ ;;
+ i386mach)
+ basic_machine=i386-mach
+ os=-mach
+ ;;
+ i386-vsta | vsta)
+ basic_machine=i386-unknown
+ os=-vsta
+ ;;
+ iris | iris4d)
+ basic_machine=mips-sgi
+ case $os in
+ -irix*)
+ ;;
+ *)
+ os=-irix4
+ ;;
+ esac
+ ;;
+ isi68 | isi)
+ basic_machine=m68k-isi
+ os=-sysv
+ ;;
+ m68knommu)
+ basic_machine=m68k-unknown
+ os=-linux
+ ;;
+ m68knommu-*)
+ basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'`
+ os=-linux
+ ;;
+ m88k-omron*)
+ basic_machine=m88k-omron
+ ;;
+ magnum | m3230)
+ basic_machine=mips-mips
+ os=-sysv
+ ;;
+ merlin)
+ basic_machine=ns32k-utek
+ os=-sysv
+ ;;
+ microblaze)
+ basic_machine=microblaze-xilinx
+ ;;
+ mingw32)
+ basic_machine=i386-pc
+ os=-mingw32
+ ;;
+ mingw32ce)
+ basic_machine=arm-unknown
+ os=-mingw32ce
+ ;;
+ miniframe)
+ basic_machine=m68000-convergent
+ ;;
+ *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
+ basic_machine=m68k-atari
+ os=-mint
+ ;;
+ mips3*-*)
+ basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
+ ;;
+ mips3*)
+ basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
+ ;;
+ monitor)
+ basic_machine=m68k-rom68k
+ os=-coff
+ ;;
+ morphos)
+ basic_machine=powerpc-unknown
+ os=-morphos
+ ;;
+ msdos)
+ basic_machine=i386-pc
+ os=-msdos
+ ;;
+ ms1-*)
+ basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'`
+ ;;
+ mvs)
+ basic_machine=i370-ibm
+ os=-mvs
+ ;;
+ ncr3000)
+ basic_machine=i486-ncr
+ os=-sysv4
+ ;;
+ netbsd386)
+ basic_machine=i386-unknown
+ os=-netbsd
+ ;;
+ netwinder)
+ basic_machine=armv4l-rebel
+ os=-linux
+ ;;
+ news | news700 | news800 | news900)
+ basic_machine=m68k-sony
+ os=-newsos
+ ;;
+ news1000)
+ basic_machine=m68030-sony
+ os=-newsos
+ ;;
+ news-3600 | risc-news)
+ basic_machine=mips-sony
+ os=-newsos
+ ;;
+ necv70)
+ basic_machine=v70-nec
+ os=-sysv
+ ;;
+ next | m*-next )
+ basic_machine=m68k-next
+ case $os in
+ -nextstep* )
+ ;;
+ -ns2*)
+ os=-nextstep2
+ ;;
+ *)
+ os=-nextstep3
+ ;;
+ esac
+ ;;
+ nh3000)
+ basic_machine=m68k-harris
+ os=-cxux
+ ;;
+ nh[45]000)
+ basic_machine=m88k-harris
+ os=-cxux
+ ;;
+ nindy960)
+ basic_machine=i960-intel
+ os=-nindy
+ ;;
+ mon960)
+ basic_machine=i960-intel
+ os=-mon960
+ ;;
+ nonstopux)
+ basic_machine=mips-compaq
+ os=-nonstopux
+ ;;
+ np1)
+ basic_machine=np1-gould
+ ;;
+ neo-tandem)
+ basic_machine=neo-tandem
+ ;;
+ nse-tandem)
+ basic_machine=nse-tandem
+ ;;
+ nsr-tandem)
+ basic_machine=nsr-tandem
+ ;;
+ op50n-* | op60c-*)
+ basic_machine=hppa1.1-oki
+ os=-proelf
+ ;;
+ openrisc | openrisc-*)
+ basic_machine=or32-unknown
+ ;;
+ os400)
+ basic_machine=powerpc-ibm
+ os=-os400
+ ;;
+ OSE68000 | ose68000)
+ basic_machine=m68000-ericsson
+ os=-ose
+ ;;
+ os68k)
+ basic_machine=m68k-none
+ os=-os68k
+ ;;
+ pa-hitachi)
+ basic_machine=hppa1.1-hitachi
+ os=-hiuxwe2
+ ;;
+ paragon)
+ basic_machine=i860-intel
+ os=-osf
+ ;;
+ parisc)
+ basic_machine=hppa-unknown
+ os=-linux
+ ;;
+ parisc-*)
+ basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'`
+ os=-linux
+ ;;
+ pbd)
+ basic_machine=sparc-tti
+ ;;
+ pbb)
+ basic_machine=m68k-tti
+ ;;
+ pc532 | pc532-*)
+ basic_machine=ns32k-pc532
+ ;;
+ pc98)
+ basic_machine=i386-pc
+ ;;
+ pc98-*)
+ basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ pentium | p5 | k5 | k6 | nexgen | viac3)
+ basic_machine=i586-pc
+ ;;
+ pentiumpro | p6 | 6x86 | athlon | athlon_*)
+ basic_machine=i686-pc
+ ;;
+ pentiumii | pentium2 | pentiumiii | pentium3)
+ basic_machine=i686-pc
+ ;;
+ pentium4)
+ basic_machine=i786-pc
+ ;;
+ pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+ basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ pentiumpro-* | p6-* | 6x86-* | athlon-*)
+ basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+ basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ pentium4-*)
+ basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ pn)
+ basic_machine=pn-gould
+ ;;
+ power) basic_machine=power-ibm
+ ;;
+ ppc) basic_machine=powerpc-unknown
+ ;;
+ ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ ppcle | powerpclittle | ppc-le | powerpc-little)
+ basic_machine=powerpcle-unknown
+ ;;
+ ppcle-* | powerpclittle-*)
+ basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ ppc64) basic_machine=powerpc64-unknown
+ ;;
+ ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ ppc64le | powerpc64little | ppc64-le | powerpc64-little)
+ basic_machine=powerpc64le-unknown
+ ;;
+ ppc64le-* | powerpc64little-*)
+ basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ;;
+ ps2)
+ basic_machine=i386-ibm
+ ;;
+ pw32)
+ basic_machine=i586-unknown
+ os=-pw32
+ ;;
+ rdos)
+ basic_machine=i386-pc
+ os=-rdos
+ ;;
+ rom68k)
+ basic_machine=m68k-rom68k
+ os=-coff
+ ;;
+ rm[46]00)
+ basic_machine=mips-siemens
+ ;;
+ rtpc | rtpc-*)
+ basic_machine=romp-ibm
+ ;;
+ s390 | s390-*)
+ basic_machine=s390-ibm
+ ;;
+ s390x | s390x-*)
+ basic_machine=s390x-ibm
+ ;;
+ sa29200)
+ basic_machine=a29k-amd
+ os=-udi
+ ;;
+ sb1)
+ basic_machine=mipsisa64sb1-unknown
+ ;;
+ sb1el)
+ basic_machine=mipsisa64sb1el-unknown
+ ;;
+ sde)
+ basic_machine=mipsisa32-sde
+ os=-elf
+ ;;
+ sei)
+ basic_machine=mips-sei
+ os=-seiux
+ ;;
+ sequent)
+ basic_machine=i386-sequent
+ ;;
+ sh)
+ basic_machine=sh-hitachi
+ os=-hms
+ ;;
+ sh5el)
+ basic_machine=sh5le-unknown
+ ;;
+ sh64)
+ basic_machine=sh64-unknown
+ ;;
+ sparclite-wrs | simso-wrs)
+ basic_machine=sparclite-wrs
+ os=-vxworks
+ ;;
+ sps7)
+ basic_machine=m68k-bull
+ os=-sysv2
+ ;;
+ spur)
+ basic_machine=spur-unknown
+ ;;
+ st2000)
+ basic_machine=m68k-tandem
+ ;;
+ stratus)
+ basic_machine=i860-stratus
+ os=-sysv4
+ ;;
+ sun2)
+ basic_machine=m68000-sun
+ ;;
+ sun2os3)
+ basic_machine=m68000-sun
+ os=-sunos3
+ ;;
+ sun2os4)
+ basic_machine=m68000-sun
+ os=-sunos4
+ ;;
+ sun3os3)
+ basic_machine=m68k-sun
+ os=-sunos3
+ ;;
+ sun3os4)
+ basic_machine=m68k-sun
+ os=-sunos4
+ ;;
+ sun4os3)
+ basic_machine=sparc-sun
+ os=-sunos3
+ ;;
+ sun4os4)
+ basic_machine=sparc-sun
+ os=-sunos4
+ ;;
+ sun4sol2)
+ basic_machine=sparc-sun
+ os=-solaris2
+ ;;
+ sun3 | sun3-*)
+ basic_machine=m68k-sun
+ ;;
+ sun4)
+ basic_machine=sparc-sun
+ ;;
+ sun386 | sun386i | roadrunner)
+ basic_machine=i386-sun
+ ;;
+ sv1)
+ basic_machine=sv1-cray
+ os=-unicos
+ ;;
+ symmetry)
+ basic_machine=i386-sequent
+ os=-dynix
+ ;;
+ t3e)
+ basic_machine=alphaev5-cray
+ os=-unicos
+ ;;
+ t90)
+ basic_machine=t90-cray
+ os=-unicos
+ ;;
+ # This must be matched before tile*.
+ tilegx*)
+ basic_machine=tilegx-unknown
+ os=-linux-gnu
+ ;;
+ tile*)
+ basic_machine=tile-unknown
+ os=-linux-gnu
+ ;;
+ tx39)
+ basic_machine=mipstx39-unknown
+ ;;
+ tx39el)
+ basic_machine=mipstx39el-unknown
+ ;;
+ toad1)
+ basic_machine=pdp10-xkl
+ os=-tops20
+ ;;
+ tower | tower-32)
+ basic_machine=m68k-ncr
+ ;;
+ tpf)
+ basic_machine=s390x-ibm
+ os=-tpf
+ ;;
+ udi29k)
+ basic_machine=a29k-amd
+ os=-udi
+ ;;
+ ultra3)
+ basic_machine=a29k-nyu
+ os=-sym1
+ ;;
+ v810 | necv810)
+ basic_machine=v810-nec
+ os=-none
+ ;;
+ vaxv)
+ basic_machine=vax-dec
+ os=-sysv
+ ;;
+ vms)
+ basic_machine=vax-dec
+ os=-vms
+ ;;
+ vpp*|vx|vx-*)
+ basic_machine=f301-fujitsu
+ ;;
+ vxworks960)
+ basic_machine=i960-wrs
+ os=-vxworks
+ ;;
+ vxworks68)
+ basic_machine=m68k-wrs
+ os=-vxworks
+ ;;
+ vxworks29k)
+ basic_machine=a29k-wrs
+ os=-vxworks
+ ;;
+ w65*)
+ basic_machine=w65-wdc
+ os=-none
+ ;;
+ w89k-*)
+ basic_machine=hppa1.1-winbond
+ os=-proelf
+ ;;
+ xbox)
+ basic_machine=i686-pc
+ os=-mingw32
+ ;;
+ xps | xps100)
+ basic_machine=xps100-honeywell
+ ;;
+ ymp)
+ basic_machine=ymp-cray
+ os=-unicos
+ ;;
+ z8k-*-coff)
+ basic_machine=z8k-unknown
+ os=-sim
+ ;;
+ z80-*-coff)
+ basic_machine=z80-unknown
+ os=-sim
+ ;;
+ none)
+ basic_machine=none-none
+ os=-none
+ ;;
+
+# Here we handle the default manufacturer of certain CPU types. It is in
+# some cases the only manufacturer, in others, it is the most popular.
+ w89k)
+ basic_machine=hppa1.1-winbond
+ ;;
+ op50n)
+ basic_machine=hppa1.1-oki
+ ;;
+ op60c)
+ basic_machine=hppa1.1-oki
+ ;;
+ romp)
+ basic_machine=romp-ibm
+ ;;
+ mmix)
+ basic_machine=mmix-knuth
+ ;;
+ rs6000)
+ basic_machine=rs6000-ibm
+ ;;
+ vax)
+ basic_machine=vax-dec
+ ;;
+ pdp10)
+ # there are many clones, so DEC is not a safe bet
+ basic_machine=pdp10-unknown
+ ;;
+ pdp11)
+ basic_machine=pdp11-dec
+ ;;
+ we32k)
+ basic_machine=we32k-att
+ ;;
+ sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele)
+ basic_machine=sh-unknown
+ ;;
+ sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v)
+ basic_machine=sparc-sun
+ ;;
+ cydra)
+ basic_machine=cydra-cydrome
+ ;;
+ orion)
+ basic_machine=orion-highlevel
+ ;;
+ orion105)
+ basic_machine=clipper-highlevel
+ ;;
+ mac | mpw | mac-mpw)
+ basic_machine=m68k-apple
+ ;;
+ pmac | pmac-mpw)
+ basic_machine=powerpc-apple
+ ;;
+ *-unknown)
+ # Make sure to match an already-canonicalized machine name.
+ ;;
+ *)
+ echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+ exit 1
+ ;;
+esac
+
+# Here we canonicalize certain aliases for manufacturers.
+case $basic_machine in
+ *-digital*)
+ basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
+ ;;
+ *-commodore*)
+ basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
+ ;;
+ *)
+ ;;
+esac
+
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if [ x"$os" != x"" ]
+then
+case $os in
+ # First match some system type aliases
+ # that might get confused with valid system types.
+ # -solaris* is a basic system type, with this one exception.
+ -auroraux)
+ os=-auroraux
+ ;;
+ -solaris1 | -solaris1.*)
+ os=`echo $os | sed -e 's|solaris1|sunos4|'`
+ ;;
+ -solaris)
+ os=-solaris2
+ ;;
+ -svr4*)
+ os=-sysv4
+ ;;
+ -unixware*)
+ os=-sysv4.2uw
+ ;;
+ -gnu/linux*)
+ os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
+ ;;
+ # First accept the basic system types.
+ # The portable systems come first.
+ # Each alternative MUST END IN A *, to match a version number.
+ # -sysv* is not here because it comes later, after sysvr4.
+ -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
+ | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\
+ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \
+ | -sym* | -kopensolaris* \
+ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
+ | -aos* | -aros* \
+ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
+ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
+ | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \
+ | -openbsd* | -solidbsd* \
+ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
+ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
+ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
+ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
+ | -chorusos* | -chorusrdb* | -cegcc* \
+ | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
+ | -mingw32* | -linux-gnu* | -linux-android* \
+ | -linux-newlib* | -linux-uclibc* \
+ | -uxpv* | -beos* | -mpeix* | -udk* \
+ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
+ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
+ | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
+ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
+ | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
+ | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
+ | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*)
+ # Remember, each alternative MUST END IN *, to match a version number.
+ ;;
+ -qnx*)
+ case $basic_machine in
+ x86-* | i*86-*)
+ ;;
+ *)
+ os=-nto$os
+ ;;
+ esac
+ ;;
+ -nto-qnx*)
+ ;;
+ -nto*)
+ os=`echo $os | sed -e 's|nto|nto-qnx|'`
+ ;;
+ -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
+ | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \
+ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
+ ;;
+ -mac*)
+ os=`echo $os | sed -e 's|mac|macos|'`
+ ;;
+ -linux-dietlibc)
+ os=-linux-dietlibc
+ ;;
+ -linux*)
+ os=`echo $os | sed -e 's|linux|linux-gnu|'`
+ ;;
+ -sunos5*)
+ os=`echo $os | sed -e 's|sunos5|solaris2|'`
+ ;;
+ -sunos6*)
+ os=`echo $os | sed -e 's|sunos6|solaris3|'`
+ ;;
+ -opened*)
+ os=-openedition
+ ;;
+ -os400*)
+ os=-os400
+ ;;
+ -wince*)
+ os=-wince
+ ;;
+ -osfrose*)
+ os=-osfrose
+ ;;
+ -osf*)
+ os=-osf
+ ;;
+ -utek*)
+ os=-bsd
+ ;;
+ -dynix*)
+ os=-bsd
+ ;;
+ -acis*)
+ os=-aos
+ ;;
+ -atheos*)
+ os=-atheos
+ ;;
+ -syllable*)
+ os=-syllable
+ ;;
+ -386bsd)
+ os=-bsd
+ ;;
+ -ctix* | -uts*)
+ os=-sysv
+ ;;
+ -nova*)
+ os=-rtmk-nova
+ ;;
+ -ns2 )
+ os=-nextstep2
+ ;;
+ -nsk*)
+ os=-nsk
+ ;;
+ # Preserve the version number of sinix5.
+ -sinix5.*)
+ os=`echo $os | sed -e 's|sinix|sysv|'`
+ ;;
+ -sinix*)
+ os=-sysv4
+ ;;
+ -tpf*)
+ os=-tpf
+ ;;
+ -triton*)
+ os=-sysv3
+ ;;
+ -oss*)
+ os=-sysv3
+ ;;
+ -svr4)
+ os=-sysv4
+ ;;
+ -svr3)
+ os=-sysv3
+ ;;
+ -sysvr4)
+ os=-sysv4
+ ;;
+ # This must come after -sysvr4.
+ -sysv*)
+ ;;
+ -ose*)
+ os=-ose
+ ;;
+ -es1800*)
+ os=-ose
+ ;;
+ -xenix)
+ os=-xenix
+ ;;
+ -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+ os=-mint
+ ;;
+ -aros*)
+ os=-aros
+ ;;
+ -kaos*)
+ os=-kaos
+ ;;
+ -zvmoe)
+ os=-zvmoe
+ ;;
+ -dicos*)
+ os=-dicos
+ ;;
+ -nacl*)
+ ;;
+ -none)
+ ;;
+ *)
+ # Get rid of the `-' at the beginning of $os.
+ os=`echo $os | sed 's/[^-]*-//'`
+ echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
+ exit 1
+ ;;
+esac
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with their
+# machine or put another way, the most popular os provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system. Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
+
+case $basic_machine in
+ score-*)
+ os=-elf
+ ;;
+ spu-*)
+ os=-elf
+ ;;
+ *-acorn)
+ os=-riscix1.2
+ ;;
+ arm*-rebel)
+ os=-linux
+ ;;
+ arm*-semi)
+ os=-aout
+ ;;
+ c4x-* | tic4x-*)
+ os=-coff
+ ;;
+ tic54x-*)
+ os=-coff
+ ;;
+ tic55x-*)
+ os=-coff
+ ;;
+ tic6x-*)
+ os=-coff
+ ;;
+ # This must come before the *-dec entry.
+ pdp10-*)
+ os=-tops20
+ ;;
+ pdp11-*)
+ os=-none
+ ;;
+ *-dec | vax-*)
+ os=-ultrix4.2
+ ;;
+ m68*-apollo)
+ os=-domain
+ ;;
+ i386-sun)
+ os=-sunos4.0.2
+ ;;
+ m68000-sun)
+ os=-sunos3
+ # This also exists in the configure program, but was not the
+ # default.
+ # os=-sunos4
+ ;;
+ m68*-cisco)
+ os=-aout
+ ;;
+ mep-*)
+ os=-elf
+ ;;
+ mips*-cisco)
+ os=-elf
+ ;;
+ mips*-*)
+ os=-elf
+ ;;
+ or32-*)
+ os=-coff
+ ;;
+ *-tti) # must be before sparc entry or we get the wrong os.
+ os=-sysv3
+ ;;
+ sparc-* | *-sun)
+ os=-sunos4.1.1
+ ;;
+ *-be)
+ os=-beos
+ ;;
+ *-haiku)
+ os=-haiku
+ ;;
+ *-ibm)
+ os=-aix
+ ;;
+ *-knuth)
+ os=-mmixware
+ ;;
+ *-wec)
+ os=-proelf
+ ;;
+ *-winbond)
+ os=-proelf
+ ;;
+ *-oki)
+ os=-proelf
+ ;;
+ *-hp)
+ os=-hpux
+ ;;
+ *-hitachi)
+ os=-hiux
+ ;;
+ i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+ os=-sysv
+ ;;
+ *-cbm)
+ os=-amigaos
+ ;;
+ *-dg)
+ os=-dgux
+ ;;
+ *-dolphin)
+ os=-sysv3
+ ;;
+ m68k-ccur)
+ os=-rtu
+ ;;
+ m88k-omron*)
+ os=-luna
+ ;;
+ *-next )
+ os=-nextstep
+ ;;
+ *-sequent)
+ os=-ptx
+ ;;
+ *-crds)
+ os=-unos
+ ;;
+ *-ns)
+ os=-genix
+ ;;
+ i370-*)
+ os=-mvs
+ ;;
+ *-next)
+ os=-nextstep3
+ ;;
+ *-gould)
+ os=-sysv
+ ;;
+ *-highlevel)
+ os=-bsd
+ ;;
+ *-encore)
+ os=-bsd
+ ;;
+ *-sgi)
+ os=-irix
+ ;;
+ *-siemens)
+ os=-sysv4
+ ;;
+ *-masscomp)
+ os=-rtu
+ ;;
+ f30[01]-fujitsu | f700-fujitsu)
+ os=-uxpv
+ ;;
+ *-rom68k)
+ os=-coff
+ ;;
+ *-*bug)
+ os=-coff
+ ;;
+ *-apple)
+ os=-macos
+ ;;
+ *-atari*)
+ os=-mint
+ ;;
+ *)
+ os=-none
+ ;;
+esac
+fi
+
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer. We pick the logical manufacturer.
+vendor=unknown
+case $basic_machine in
+ *-unknown)
+ case $os in
+ -riscix*)
+ vendor=acorn
+ ;;
+ -sunos*)
+ vendor=sun
+ ;;
+ -cnk*|-aix*)
+ vendor=ibm
+ ;;
+ -beos*)
+ vendor=be
+ ;;
+ -hpux*)
+ vendor=hp
+ ;;
+ -mpeix*)
+ vendor=hp
+ ;;
+ -hiux*)
+ vendor=hitachi
+ ;;
+ -unos*)
+ vendor=crds
+ ;;
+ -dgux*)
+ vendor=dg
+ ;;
+ -luna*)
+ vendor=omron
+ ;;
+ -genix*)
+ vendor=ns
+ ;;
+ -mvs* | -opened*)
+ vendor=ibm
+ ;;
+ -os400*)
+ vendor=ibm
+ ;;
+ -ptx*)
+ vendor=sequent
+ ;;
+ -tpf*)
+ vendor=ibm
+ ;;
+ -vxsim* | -vxworks* | -windiss*)
+ vendor=wrs
+ ;;
+ -aux*)
+ vendor=apple
+ ;;
+ -hms*)
+ vendor=hitachi
+ ;;
+ -mpw* | -macos*)
+ vendor=apple
+ ;;
+ -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+ vendor=atari
+ ;;
+ -vos*)
+ vendor=stratus
+ ;;
+ esac
+ basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
+ ;;
+esac
+
+echo $basic_machine$os
+exit
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
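The big case statement above is, in effect, a lookup table from shorthand machine aliases to canonical cpu-vendor pairs. As a minimal illustrative sketch (hypothetical Python, not part of config.sub), a few of the entries from the table behave like this:

    # Illustrative only: alias -> cpu-vendor mappings copied from the
    # case statement above.
    ALIASES = {
        'amd64':   'x86_64-pc',
        'ppc':     'powerpc-unknown',
        'ppc64le': 'powerpc64le-unknown',
        'sun4':    'sparc-sun',
        'vax':     'vax-dec',
    }

    def canonical_machine(alias):
        # config.sub rejects names it does not recognize; mirror that here.
        if alias not in ALIASES:
            raise ValueError("machine '%s' not recognized" % alias)
        return ALIASES[alias]

    print(canonical_machine('amd64'))   # -> x86_64-pc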
diff --git a/build/find_python.sh b/build/find_python.sh
new file mode 100755
index 0000000..0c4b609
--- /dev/null
+++ b/build/find_python.sh
@@ -0,0 +1,37 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+# Required version of Python
+# Python 2.0 = 0x2000000
+# Python 2.4 = 0x2040000
+VERSION=${1:-0x2040000}
+
+for pypath in "$PYTHON" "$PYTHON2" "$PYTHON3" python python2 python3; do
+ if [ "x$pypath" != "x" ]; then
+ DETECT_PYTHON="import sys;sys.exit((sys.hexversion < $VERSION) and 1 or 0)"
+ if "$pypath" -c "$DETECT_PYTHON" >/dev/null 2>/dev/null; then
+ echo $pypath
+ exit 0
+ fi
+ fi
+done
+exit 1
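The version constants in the comment above are sys.hexversion values. A minimal sketch of that encoding, assuming CPython's documented layout (major/minor/micro in the top three bytes, release level and serial in the lowest byte):

    import sys

    def hexversion(major, minor, micro=0):
        # 0xMMmmuuRS; 0xF0 means release level 'final', serial 0.
        return (major << 24) | (minor << 16) | (micro << 8) | 0xF0

    # The script's default of 0x2040000 corresponds to Python 2.4.0, so any
    # final 2.4 interpreter (hexversion 0x020400F0) passes the "<" check.
    assert hexversion(2, 4) >= 0x2040000
    print(sys.hexversion >= 0x2040000)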
diff --git a/build/generator/__init__.py b/build/generator/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/generator/__init__.py
diff --git a/build/generator/extractor.py b/build/generator/extractor.py
new file mode 100755
index 0000000..413b892
--- /dev/null
+++ b/build/generator/extractor.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+#
+# extractor.py: extract function names from declarations in header files
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+
+import os
+import re
+
+#
+# This parses the following two types of declarations:
+#
+# void
+# svn_foo_bar (args)
+# or
+# void svn_foo_bar (args)
+#
+_funcs = re.compile(r'^(?:(?:(?:\w+|\*) )+\*?)?((?:svn|apr)_[a-z_0-9]+)\s*\(', re.M)
+
+def extract_funcs(fname):
+ funcs = [ ]
+ for name in _funcs.findall(open(fname).read()):
+ if name not in _filter_names:
+ funcs.append(name)
+ return funcs
+
+_filter_names = [
+ 'svn_boolean_t', # svn_config_enumerator_t looks like (to our regex) a
+ # function declaration for svn_boolean_t
+
+ # Not available on Windows
+ 'svn_auth_get_keychain_simple_provider',
+ 'svn_auth_get_keychain_ssl_client_cert_pw_provider',
+ 'svn_auth_get_gnome_keyring_simple_provider',
+ 'svn_auth_get_gnome_keyring_ssl_client_cert_pw_provider',
+ 'svn_auth_get_kwallet_simple_provider',
+ 'svn_auth_get_kwallet_ssl_client_cert_pw_provider',
+ 'svn_auth_gnome_keyring_version',
+ 'svn_auth_kwallet_version',
+ ]
+
+if __name__ == '__main__':
+ # run the extractor over each file mentioned
+ import sys
+ print("EXPORTS")
+ for fname in sys.argv[1:]:
+ for func in extract_funcs(fname):
+ print(func)
+ if os.path.basename(fname) == 'svn_ctype.h':
+ print('svn_ctype_table = svn_ctype_table_internal CONSTANT')
+ elif os.path.basename(fname) == 'svn_wc_private.h':
+ print('svn_wc__internal_walk_children')
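A minimal sketch (with made-up declarations) of the two declaration forms that the _funcs regular expression above is written to match:

    import re

    _funcs = re.compile(r'^(?:(?:(?:\w+|\*) )+\*?)?((?:svn|apr)_[a-z_0-9]+)\s*\(',
                        re.M)

    header = ("void\n"
              "svn_foo_bar (int x);\n"         # return type on its own line
              "void svn_foo_baz (int x);\n")   # return type on the same line

    print(_funcs.findall(header))   # ['svn_foo_bar', 'svn_foo_baz']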
diff --git a/build/generator/ezt.py b/build/generator/ezt.py
new file mode 100755
index 0000000..d7603c0
--- /dev/null
+++ b/build/generator/ezt.py
@@ -0,0 +1,875 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+"""ezt.py -- easy templating
+
+ezt templates are simply text files in whatever format you so desire
+(such as XML, HTML, etc.) which contain directives sprinkled
+throughout. With these directives it is possible to generate the
+dynamic content from the ezt templates.
+
+These directives are enclosed in square brackets. If you are a
+C-programmer, you might be familiar with the #ifdef directives of the C
+preprocessor 'cpp'. ezt provides a similar concept. Additionally, EZT
+has a 'for' directive, which allows it to iterate (repeat) certain
+subsections of the template according to a sequence of data items
+provided by the application.
+
+The final rendering is performed by the method generate() of the Template
+class. Building template instances can either be done using external
+EZT files (convention: use the suffix .ezt for such files):
+
+ >>> template = Template("../templates/log.ezt")
+
+or by calling the parse() method of a template instance directly with
+an EZT template string:
+
+ >>> template = Template()
+ >>> template.parse('''<html><head>
+ ... <title>[title_string]</title></head>
+ ... <body><h1>[title_string]</h1>
+ ... [for a_sequence] <p>[a_sequence]</p>
+ ... [end] <hr>
+ ... The [person] is [if-any state]in[else]out[end].
+ ... </body>
+ ... </html>
+ ... ''')
+
+The application should build a dictionary 'data' and pass it together
+with the output file object to the template's generate() method:
+
+ >>> data = {'title_string' : "A Dummy Page",
+ ... 'a_sequence' : ['list item 1', 'list item 2', 'another element'],
+ ... 'person': "doctor",
+ ... 'state' : None }
+ >>> import sys
+ >>> template.generate(sys.stdout, data)
+ <html><head>
+ <title>A Dummy Page</title></head>
+ <body><h1>A Dummy Page</h1>
+ <p>list item 1</p>
+ <p>list item 2</p>
+ <p>another element</p>
+ <hr>
+ The doctor is out.
+ </body>
+ </html>
+
+Template syntax error reporting should be improved. Currently it is
+very sparse (template line numbers would be nice):
+
+ >>> Template().parse("[if-any where] foo [else] bar [end unexpected args]")
+ Traceback (innermost last):
+ File "<stdin>", line 1, in ?
+ File "ezt.py", line 220, in parse
+ self.program = self._parse(text)
+ File "ezt.py", line 275, in _parse
+ raise ArgCountSyntaxError(str(args[1:]))
+ ArgCountSyntaxError: ['unexpected', 'args']
+ >>> Template().parse("[if unmatched_end]foo[end]")
+ Traceback (innermost last):
+ File "<stdin>", line 1, in ?
+ File "ezt.py", line 206, in parse
+ self.program = self._parse(text)
+ File "ezt.py", line 266, in _parse
+ raise UnmatchedEndError()
+ UnmatchedEndError
+
+
+Directives
+==========
+
+ Several directives allow the use of dotted qualified names referring to objects
+ or attributes of objects contained in the data dictionary given to the
+ .generate() method.
+
+ Qualified names
+ ---------------
+
+ Qualified names have two basic forms: a variable reference, or a string
+ constant. References are a name from the data dictionary with optional
+ dotted attributes (where each intermediary is an object with attributes,
+ of course).
+
+ Examples:
+
+ [varname]
+
+ [ob.attr]
+
+ ["string"]
+
+ Simple directives
+ -----------------
+
+ [QUAL_NAME]
+
+ This directive is simply replaced by the value of the qualified name.
+ Numbers are converted to a string, and None becomes an empty string.
+
+ [QUAL_NAME QUAL_NAME ...]
+
+ The first value defines a substitution format, specifying constant
+ text and indices of the additional arguments. The arguments are then
+ substituted and the result is inserted into the output stream.
+
+ Example:
+ ["abc %0 def %1 ghi %0" foo bar.baz]
+
+ Note that the first value can be any type of qualified name -- a string
+ constant or a variable reference. Use %% to substitute a percent sign.
+ Argument indices are 0-based.
+
+ [include "filename"] or [include QUAL_NAME]
+
+ This directive is replaced by the content of the named include file. Note
+ that a string constant is more efficient -- the target file is compiled
+ inline. In the variable form, the target file is compiled and executed
+ at runtime.
+
+ [insertfile "filename"] or [insertfile QUAL_NAME]
+
+ This directive is replaced by content from the named file, but as a
+ literal string: directives in the target file are not expanded. As
+ in the case of the "include" directive, using a string constant for
+ the filename is more efficient than the variable form.
+
+ Block directives
+ ----------------
+
+ [for QUAL_NAME] ... [end]
+
+ The text within the [for ...] directive and the corresponding [end]
+ is repeated for each element in the sequence referred to by the
+ qualified name in the for directive. Within the for block, this
+ identifier refers to the actual item indexed by the current loop
+ iteration.
+
+ [if-any QUAL_NAME [QUAL_NAME2 ...]] ... [else] ... [end]
+
+ Test if any QUAL_NAME value is not None and not an empty string or list.
+ The [else] clause is optional. CAUTION: Numeric values are
+ converted to string, so if QUAL_NAME refers to a numeric value 0,
+ the then-clause is substituted!
+
+ [if-index INDEX_FROM_FOR odd] ... [else] ... [end]
+ [if-index INDEX_FROM_FOR even] ... [else] ... [end]
+ [if-index INDEX_FROM_FOR first] ... [else] ... [end]
+ [if-index INDEX_FROM_FOR last] ... [else] ... [end]
+ [if-index INDEX_FROM_FOR NUMBER] ... [else] ... [end]
+
+ These five directives work similarly to [if-any], but are only useful
+ within a [for ...] block (see above). The odd/even directives are
+ useful, for example, to choose different background colors for
+ adjacent rows in a table. Similarly, the first/last directives might
+ be used to remove certain parts (for example, "Diff to previous"
+ doesn't make sense if there is no previous).
+
+ [is QUAL_NAME STRING] ... [else] ... [end]
+ [is QUAL_NAME QUAL_NAME] ... [else] ... [end]
+
+ The [is ...] directive is similar to the other conditional
+ directives above, but it allows comparing two value references or
+ comparing a value reference with a constant string.
+
+ [define VARIABLE] ... [end]
+
+ The [define ...] directive allows you to create and modify template
+ variables from within the template itself. Essentially, any data
+ between the [define ...] directive and its matching [end] will be
+ expanded using the other template parsing and output generation
+ rules, and then stored as a string value assigned to the variable
+ VARIABLE. The new (or changed) variable is then available for use
+ with other mechanisms such as [is ...] or [if-any ...], as long as
+ they appear later in the template.
+
+ [format "html|xml|js|url|raw"] ... [end]
+
+ The [format ...] directive creates a block in which any substitutions
+ are processed as though the template had been instantiated with
+ the corresponding 'base_format' argument. Comma-separated format
+ specifiers perform nested encodings. In this case the encodings are
+ applied left-to-right. For example, the directive [format "html,js"]
+ will HTML-encode and then JavaScript-encode any inserted template variables.
+"""
+#
+# Copyright (C) 2001-2009 Greg Stein. All Rights Reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+#
+# This software is maintained by Greg and is available at:
+# http://code.google.com/p/ezt/
+#
+
+import os, re, sys
+
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ long = int
+ unicode = str
+ from io import StringIO
+ from urllib.parse import quote_plus as urllib_parse_quote_plus
+else:
+ # Python <3.0
+ from urllib import quote_plus as urllib_parse_quote_plus
+ try:
+ from cStringIO import StringIO
+ except ImportError:
+ from StringIO import StringIO
+
+#
+# Formatting types
+#
+FORMAT_RAW = 'raw'
+FORMAT_HTML = 'html'
+FORMAT_XML = 'xml'
+FORMAT_JS = 'js'
+FORMAT_URL = 'url'
+
+#
+# This regular expression matches three alternatives:
+# expr: NEWLINE | DIRECTIVE | BRACKET | COMMENT
+# DIRECTIVE: '[' ITEM (whitespace ITEM)* ']
+# ITEM: STRING | NAME
+# STRING: '"' (not-slash-or-dquote | '\' anychar)* '"'
+# NAME: (alphanum | '_' | '-' | '.')+
+# BRACKET: '[[]'
+# COMMENT: '[#' not-rbracket* ']'
+#
+# When used with the split() method, the return value will be composed of
+# non-matching text and the three paren groups (NEWLINE, DIRECTIVE and
+# BRACKET). Since the COMMENT matches are not placed into a group, they are
+# considered a "splitting" value and simply dropped.
+#
+_item = r'(?:"(?:[^\\"]|\\.)*"|[-\w.]+)'
+_re_parse = re.compile(r'(\r?\n)|\[(%s(?: +%s)*)\]|(\[\[\])|\[#[^\]]*\]' %
+ (_item, _item))
+
+_re_args = re.compile(r'"(?:[^\\"]|\\.)*"|[-\w.]+')
+
+# block commands and their argument counts
+_block_cmd_specs = { 'if-index':2, 'for':1, 'is':2, 'define':1, 'format':1 }
+_block_cmds = _block_cmd_specs.keys()
+
+# two regular expressions for compressing whitespace. the first is used to
+# compress any whitespace including a newline into a single newline. the
+# second regex is used to compress runs of whitespace into a single space.
+_re_newline = re.compile('[ \t\r\f\v]*\n\\s*')
+_re_whitespace = re.compile(r'\s\s+')
+
+# this regex is used to substitute arguments into a value. we split the value,
+# replace the relevant pieces, and then put it all back together. splitting
+# will produce a list of: TEXT ( splitter TEXT )*. splitter will be '%' or
+# an integer.
+_re_subst = re.compile('%(%|[0-9]+)')
+
+class Template:
+
+ def __init__(self, fname=None, compress_whitespace=1,
+ base_format=FORMAT_RAW):
+ self.compress_whitespace = compress_whitespace
+ if fname:
+ self.parse_file(fname, base_format)
+
+ def parse_file(self, fname, base_format=FORMAT_RAW):
+ "fname -> a string object with pathname of file containg an EZT template."
+
+ self.parse(_FileReader(fname), base_format)
+
+ def parse(self, text_or_reader, base_format=FORMAT_RAW):
+ """Parse the template specified by text_or_reader.
+
+ The argument should be a string containing the template, or it should
+ specify a subclass of ezt.Reader which can read templates. The base
+ format for printing values is given by base_format.
+ """
+ if not isinstance(text_or_reader, Reader):
+ # assume the argument is a plain text string
+ text_or_reader = _TextReader(text_or_reader)
+
+ self.program = self._parse(text_or_reader,
+ base_printer=_parse_format(base_format))
+
+ def generate(self, fp, data):
+ if hasattr(data, '__getitem__') or hasattr(getattr(data, 'keys', None), '__call__'):
+ # a dictionary-like object was passed. convert it to an
+ # attribute-based object.
+ class _data_ob:
+ def __init__(self, d):
+ vars(self).update(d)
+ data = _data_ob(data)
+
+ ctx = _context()
+ ctx.data = data
+ ctx.for_index = { }
+ ctx.defines = { }
+ self._execute(self.program, fp, ctx)
+
+ def _parse(self, reader, for_names=None, file_args=(), base_printer=None):
+ """text -> string object containing the template.
+
+ This is a private helper function doing the real work for method parse.
+ It returns the parsed template as a 'program'. This program is a sequence
+ made out of strings or (function, argument) 2-tuples.
+
+ Note: comment directives [# ...] are automatically dropped by _re_parse.
+ """
+
+ filename = reader.filename()
+ # parse the template program into: (TEXT NEWLINE DIRECTIVE BRACKET)* TEXT
+ parts = _re_parse.split(reader.text)
+
+ program = [ ]
+ stack = [ ]
+ if not for_names:
+ for_names = [ ]
+
+ if base_printer is None:
+ base_printer = ()
+ printers = [ base_printer ]
+
+ one_newline_copied = False
+ line_number = 1
+ for i in range(len(parts)):
+ piece = parts[i]
+ which = i % 4 # discriminate between: TEXT NEWLINE DIRECTIVE BRACKET
+ if which == 0:
+ # TEXT. append if non-empty.
+ if piece:
+ if self.compress_whitespace:
+ piece = _re_whitespace.sub(' ', piece)
+ program.append(piece)
+ one_newline_copied = False
+ elif which == 1:
+ # NEWLINE. append unless compress_whitespace requested
+ if piece:
+ line_number += 1
+ if self.compress_whitespace:
+ if not one_newline_copied:
+ program.append('\n')
+ one_newline_copied = True
+ else:
+ program.append(piece)
+ elif which == 3:
+ # BRACKET directive. append '[' if present.
+ if piece:
+ program.append('[')
+ one_newline_copied = False
+ elif piece:
+ # DIRECTIVE is present.
+ one_newline_copied = False
+ args = _re_args.findall(piece)
+ cmd = args[0]
+ if cmd == 'else':
+ if len(args) > 1:
+ raise ArgCountSyntaxError(str(args[1:]), filename, line_number)
+ ### check: don't allow for 'for' cmd
+ idx = stack[-1][1]
+ true_section = program[idx:]
+ del program[idx:]
+ stack[-1][3] = true_section
+ elif cmd == 'end':
+ if len(args) > 1:
+ raise ArgCountSyntaxError(str(args[1:]), filename, line_number)
+ # note: true-section may be None
+ try:
+ cmd, idx, args, true_section, start_line_number = stack.pop()
+ except IndexError:
+ raise UnmatchedEndError(None, filename, line_number)
+ else_section = program[idx:]
+ if cmd == 'format':
+ printers.pop()
+ else:
+ func = getattr(self, '_cmd_' + re.sub('-', '_', cmd))
+ program[idx:] = [ (func, (args, true_section, else_section),
+ filename, line_number) ]
+ if cmd == 'for':
+ for_names.pop()
+ elif cmd in _block_cmds:
+ if len(args) > _block_cmd_specs[cmd] + 1:
+ raise ArgCountSyntaxError(str(args[1:]), filename, line_number)
+ ### this assumes arg1 is always a ref unless cmd is 'define'
+ if cmd != 'define':
+ args[1] = _prepare_ref(args[1], for_names, file_args)
+
+ # handle arg2 for the 'is' command
+ if cmd == 'is':
+ args[2] = _prepare_ref(args[2], for_names, file_args)
+ elif cmd == 'for':
+ for_names.append(args[1][0]) # append the refname
+ elif cmd == 'format':
+ if args[1][0]:
+ raise BadFormatConstantError(str(args[1:]), filename, line_number)
+ printers.append(_parse_format(args[1][1]))
+
+ # remember the cmd, current pos, args, and a section placeholder
+ stack.append([cmd, len(program), args[1:], None, line_number])
+ elif cmd == 'include' or cmd == 'insertfile':
+ is_insertfile = (cmd == 'insertfile')
+ # extra arguments are meaningless when using insertfile
+ if is_insertfile and len(args) != 2:
+ raise ArgCountSyntaxError(str(args), filename, line_number)
+ if args[1][0] == '"':
+ include_filename = args[1][1:-1]
+ if is_insertfile:
+ program.append(reader.read_other(include_filename).text)
+ else:
+ f_args = [ ]
+ for arg in args[2:]:
+ f_args.append(_prepare_ref(arg, for_names, file_args))
+ program.extend(self._parse(reader.read_other(include_filename),
+ for_names, f_args, printers[-1]))
+ else:
+ if len(args) != 2:
+ raise ArgCountSyntaxError(str(args), filename, line_number)
+ if is_insertfile:
+ cmd = self._cmd_insertfile
+ else:
+ cmd = self._cmd_include
+ program.append((cmd,
+ (_prepare_ref(args[1], for_names, file_args),
+ reader, printers[-1]), filename, line_number))
+ elif cmd == 'if-any':
+ f_args = [ ]
+ for arg in args[1:]:
+ f_args.append(_prepare_ref(arg, for_names, file_args))
+ stack.append(['if-any', len(program), f_args, None, line_number])
+ else:
+ # implied PRINT command
+ if len(args) > 1:
+ f_args = [ ]
+ for arg in args:
+ f_args.append(_prepare_ref(arg, for_names, file_args))
+ program.append((self._cmd_subst,
+ (printers[-1], f_args[0], f_args[1:]),
+ filename, line_number))
+ else:
+ valref = _prepare_ref(args[0], for_names, file_args)
+ program.append((self._cmd_print, (printers[-1], valref),
+ filename, line_number))
+
+ if stack:
+ raise UnclosedBlocksError('Block opened at line %s' % stack[-1][4],
+ filename=filename)
+ return program
+
+ def _execute(self, program, fp, ctx):
+ """This private helper function takes a 'program' sequence as created
+ by the method '_parse' and executes it step by step. strings are written
+ to the file object 'fp' and functions are called.
+ """
+ for step in program:
+ if isinstance(step, str):
+ fp.write(step)
+ else:
+ method, method_args, filename, line_number = step
+ method(method_args, fp, ctx, filename, line_number)
+
+ def _cmd_print(self, transforms_valref, fp, ctx, filename, line_number):
+ (transforms, valref) = transforms_valref
+ value = _get_value(valref, ctx, filename, line_number)
+ # if the value has a 'read' attribute, then it is a stream: copy it
+ if hasattr(value, 'read'):
+ while 1:
+ chunk = value.read(16384)
+ if not chunk:
+ break
+ for t in transforms:
+ chunk = t(chunk)
+ fp.write(chunk)
+ else:
+ for t in transforms:
+ value = t(value)
+ fp.write(value)
+
+ def _cmd_subst(self, transforms_valref_args, fp, ctx, filename,
+ line_number):
+ (transforms, valref, args) = transforms_valref_args
+ fmt = _get_value(valref, ctx, filename, line_number)
+ parts = _re_subst.split(fmt)
+ for i in range(len(parts)):
+ piece = parts[i]
+ if i%2 == 1 and piece != '%':
+ idx = int(piece)
+ if idx < len(args):
+ piece = _get_value(args[idx], ctx, filename, line_number)
+ else:
+ piece = '<undef>'
+ for t in transforms:
+ piece = t(piece)
+ fp.write(piece)
+
+ def _cmd_include(self, valref_reader_printer, fp, ctx, filename,
+ line_number):
+ (valref, reader, printer) = valref_reader_printer
+ fname = _get_value(valref, ctx, filename, line_number)
+ ### note: we don't have the set of for_names to pass into this parse.
+ ### I don't think there is anything to do but document it
+ self._execute(self._parse(reader.read_other(fname), base_printer=printer),
+ fp, ctx)
+
+ def _cmd_insertfile(self, valref_reader_printer, fp, ctx, filename,
+ line_number):
+ (valref, reader, printer) = valref_reader_printer
+ fname = _get_value(valref, ctx, filename, line_number)
+ fp.write(reader.read_other(fname).text)
+
+ def _cmd_if_any(self, args, fp, ctx, filename, line_number):
+ "If any value is a non-empty string or non-empty list, then T else F."
+ (valrefs, t_section, f_section) = args
+ value = 0
+ for valref in valrefs:
+ if _get_value(valref, ctx, filename, line_number):
+ value = 1
+ break
+ self._do_if(value, t_section, f_section, fp, ctx)
+
+ def _cmd_if_index(self, args, fp, ctx, filename, line_number):
+ ((valref, value), t_section, f_section) = args
+ list, idx = ctx.for_index[valref[0]]
+ if value == 'even':
+ value = idx % 2 == 0
+ elif value == 'odd':
+ value = idx % 2 == 1
+ elif value == 'first':
+ value = idx == 0
+ elif value == 'last':
+ value = idx == len(list)-1
+ else:
+ value = idx == int(value)
+ self._do_if(value, t_section, f_section, fp, ctx)
+
+ def _cmd_is(self, args, fp, ctx, filename, line_number):
+ ((left_ref, right_ref), t_section, f_section) = args
+ right_value = _get_value(right_ref, ctx, filename, line_number)
+ left_value = _get_value(left_ref, ctx, filename, line_number)
+ value = left_value.lower() == right_value.lower()
+ self._do_if(value, t_section, f_section, fp, ctx)
+
+ def _do_if(self, value, t_section, f_section, fp, ctx):
+ if t_section is None:
+ t_section = f_section
+ f_section = None
+ if value:
+ section = t_section
+ else:
+ section = f_section
+ if section is not None:
+ self._execute(section, fp, ctx)
+
+ def _cmd_for(self, args, fp, ctx, filename, line_number):
+ ((valref,), unused, section) = args
+ list = _get_value(valref, ctx, filename, line_number)
+ refname = valref[0]
+ if isinstance(list, str):
+ raise NeedSequenceError(refname, filename, line_number)
+ ctx.for_index[refname] = idx = [ list, 0 ]
+ for item in list:
+ self._execute(section, fp, ctx)
+ idx[1] = idx[1] + 1
+ del ctx.for_index[refname]
+
+ def _cmd_define(self, args, fp, ctx, filename, line_number):
+ ((name,), unused, section) = args
+ valfp = StringIO()
+ if section is not None:
+ self._execute(section, valfp, ctx)
+ ctx.defines[name] = valfp.getvalue()
+
+def boolean(value):
+ "Return a value suitable for [if-any bool_var] usage in a template."
+ if value:
+ return 'yes'
+ return None
+
+
+def _prepare_ref(refname, for_names, file_args):
+ """refname -> a string containing a dotted identifier. example:"foo.bar.bang"
+ for_names -> a list of active for sequences.
+
+ Returns a `value reference', a 3-tuple made out of (refname, start, rest),
+ for fast access later.
+ """
+ # is the reference a string constant?
+ if refname[0] == '"':
+ return None, refname[1:-1], None
+
+ parts = refname.split('.')
+ start = parts[0]
+ rest = parts[1:]
+
+ # if this is an include-argument, then just return the prepared ref
+ if start[:3] == 'arg':
+ try:
+ idx = int(start[3:])
+ except ValueError:
+ pass
+ else:
+ if idx < len(file_args):
+ orig_refname, start, more_rest = file_args[idx]
+ if more_rest is None:
+ # the include-argument was a string constant
+ return None, start, None
+
+ # prepend the argument's "rest" for our further processing
+ rest[:0] = more_rest
+
+ # rewrite the refname to ensure that any potential 'for' processing
+ # has the correct name
+ ### this can make it hard for debugging include files since we lose
+ ### the 'argNNN' names
+ if not rest:
+ return start, start, [ ]
+ refname = start + '.' + '.'.join(rest)
+
+ if for_names:
+ # From last to first part, check if this reference is part of a for loop
+ for i in range(len(parts), 0, -1):
+ name = '.'.join(parts[:i])
+ if name in for_names:
+ return refname, name, parts[i:]
+
+ return refname, start, rest
+
+def _get_value(refname_start_rest, ctx, filename, line_number):
+ """refname_start_rest -> a prepared `value reference' (see above).
+ ctx -> an execution context instance.
+
+ Does a name space lookup within the template name space. Active
+ for blocks take precedence over data dictionary members with the
+ same name.
+ """
+ (refname, start, rest) = refname_start_rest
+ if rest is None:
+ # it was a string constant
+ return start
+
+ # get the starting object
+ if start in ctx.for_index:
+ list, idx = ctx.for_index[start]
+ ob = list[idx]
+ elif start in ctx.defines:
+ ob = ctx.defines[start]
+ elif hasattr(ctx.data, start):
+ ob = getattr(ctx.data, start)
+ else:
+ raise UnknownReference(refname, filename, line_number)
+
+ # walk the rest of the dotted reference
+ for attr in rest:
+ try:
+ ob = getattr(ob, attr)
+ except AttributeError:
+ raise UnknownReference(refname, filename, line_number)
+
+ # make sure we return a string instead of some various Python types
+ if isinstance(ob, (int, long, float)):
+ return str(ob)
+ if ob is None:
+ return ''
+
+ # string or a sequence
+ return ob
+
+def _replace(s, replace_map):
+ for orig, repl in replace_map:
+ s = s.replace(orig, repl)
+ return s
+
+REPLACE_JS_MAP = (
+ ('\\', r'\\'), ('\t', r'\t'), ('\n', r'\n'), ('\r', r'\r'),
+ ('"', r'\x22'), ('\'', r'\x27'), ('&', r'\x26'),
+ ('<', r'\x3c'), ('>', r'\x3e'), ('=', r'\x3d'),
+)
+
+# Various unicode whitespace
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ REPLACE_JS_UNICODE_MAP = (
+ ('\u0085', r'\u0085'), ('\u2028', r'\u2028'), ('\u2029', r'\u2029')
+ )
+else:
+ # Python <3.0
+ REPLACE_JS_UNICODE_MAP = eval("((u'\u0085', r'\u0085'), (u'\u2028', r'\u2028'), (u'\u2029', r'\u2029'))")
+
+# Why not cgi.escape? It doesn't do single quotes which are occasionally
+# used to contain HTML attributes and event handler definitions (unfortunately)
+REPLACE_HTML_MAP = (
+ ('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;'),
+ ('"', '&quot;'), ('\'', '&#39;'),
+)
+
+def _js_escape(s):
+ s = _replace(s, REPLACE_JS_MAP)
+ ### perhaps attempt to coerce the string to unicode and then replace?
+ if isinstance(s, unicode):
+ s = _replace(s, REPLACE_JS_UNICODE_MAP)
+ return s
+
+def _html_escape(s):
+ return _replace(s, REPLACE_HTML_MAP)
+
+def _url_escape(s):
+ ### quote_plus barfs on non-ASCII characters. According to
+ ### http://www.w3.org/International/O-URL-code.html URIs should be
+ ### UTF-8 encoded first.
+ if isinstance(s, unicode):
+ s = s.encode('utf8')
+ return urllib_parse_quote_plus(s)
+
+FORMATTERS = {
+ FORMAT_RAW: None,
+ FORMAT_HTML: _html_escape,
+ FORMAT_XML: _html_escape, ### use the same quoting as HTML for now
+ FORMAT_JS: _js_escape,
+ FORMAT_URL: _url_escape,
+}
+
+def _parse_format(format_string=FORMAT_RAW):
+ format_funcs = []
+ try:
+ for fspec in format_string.split(','):
+ format_func = FORMATTERS[fspec]
+ if format_func is not None:
+ format_funcs.append(format_func)
+ except KeyError:
+ raise UnknownFormatConstantError(format_string)
+ return format_funcs
+
+class _context:
+ """A container for the execution context"""
+
+
+class Reader:
+ """Abstract class which allows EZT to detect Reader objects."""
+ def filename(self):
+ return '(%s does not provide filename() method)' % repr(self)
+
+class _FileReader(Reader):
+ """Reads templates from the filesystem."""
+ def __init__(self, fname):
+ self.text = open(fname, 'rb').read()
+ if sys.version_info[0] >= 3:
+ # Python >=3.0
+ self.text = self.text.decode()
+ self._dir = os.path.dirname(fname)
+ self.fname = fname
+ def read_other(self, relative):
+ return _FileReader(os.path.join(self._dir, relative))
+ def filename(self):
+ return self.fname
+
+class _TextReader(Reader):
+ """'Reads' a template from provided text."""
+ def __init__(self, text):
+ self.text = text
+ def read_other(self, relative):
+ raise BaseUnavailableError()
+ def filename(self):
+ return '(text)'
+
+
+class EZTException(Exception):
+ """Parent class of all EZT exceptions."""
+ def __init__(self, message=None, filename=None, line_number=None):
+ self.message = message
+ self.filename = filename
+ self.line_number = line_number
+ def __str__(self):
+ ret = []
+ if self.message is not None:
+ ret.append(self.message)
+ if self.filename is not None:
+ ret.append('in file ' + str(self.filename))
+ if self.line_number is not None:
+ ret.append('at line ' + str(self.line_number))
+ return ' '.join(ret)
+
+class ArgCountSyntaxError(EZTException):
+ """A bracket directive got the wrong number of arguments."""
+
+class UnknownReference(EZTException):
+ """The template references an object not contained in the data dictionary."""
+
+class NeedSequenceError(EZTException):
+ """The object dereferenced by the template is no sequence (tuple or list)."""
+
+class UnclosedBlocksError(EZTException):
+ """This error may be simply a missing [end]."""
+
+class UnmatchedEndError(EZTException):
+ """This error may be caused by a misspelled if directive."""
+
+class BaseUnavailableError(EZTException):
+ """Base location is unavailable, which disables includes."""
+
+class BadFormatConstantError(EZTException):
+ """Format specifiers must be string constants."""
+
+class UnknownFormatConstantError(EZTException):
+ """The format specifier is an unknown value."""
+
+
+# --- standard test environment ---
+def test_parse():
+ assert _re_parse.split('[a]') == ['', '[a]', None, '']
+ assert _re_parse.split('[a] [b]') == \
+ ['', '[a]', None, ' ', '[b]', None, '']
+ assert _re_parse.split('[a c] [b]') == \
+ ['', '[a c]', None, ' ', '[b]', None, '']
+ assert _re_parse.split('x [a] y [b] z') == \
+ ['x ', '[a]', None, ' y ', '[b]', None, ' z']
+ assert _re_parse.split('[a "b" c "d"]') == \
+ ['', '[a "b" c "d"]', None, '']
+ assert _re_parse.split(r'["a \"b[foo]" c.d f]') == \
+ ['', '["a \\"b[foo]" c.d f]', None, '']
+
+def _test(argv):
+ import doctest, ezt
+ verbose = "-v" in argv
+ return doctest.testmod(ezt, verbose=verbose)
+
+if __name__ == "__main__":
+ # invoke unit test for this module:
+ import sys
+ sys.exit(_test(sys.argv)[0])
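The docstring above describes [define], [format], [for] and [if-index] without a combined worked example. A minimal sketch, assuming build/generator is on sys.path so the module is importable as 'ezt':

    import sys
    from ezt import Template

    t = Template()
    t.parse('[define greeting]Hello, [who][end]'
            '[format "html"][greeting][end] / '
            '[for items][if-index items first][else], [end][items][end]')
    t.generate(sys.stdout, {'who': '<world>', 'items': ['a', 'b', 'c']})
    # writes: Hello, &lt;world&gt; / a, b, c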
diff --git a/build/generator/gen_base.py b/build/generator/gen_base.py
new file mode 100644
index 0000000..cad81c2
--- /dev/null
+++ b/build/generator/gen_base.py
@@ -0,0 +1,1195 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# gen_base.py -- infrastructure for generating makefiles, dependencies, etc.
+#
+
+import os
+import sys
+import glob
+import re
+import fileinput
+import filecmp
+try:
+ # Python >=3.0
+ import configparser
+except ImportError:
+ # Python <3.0
+ import ConfigParser as configparser
+import generator.swig
+
+import getversion
+
+
+def _warning(msg):
+ sys.stderr.write("WARNING: %s\n" % msg)
+
+def _error(msg):
+ sys.stderr.write("ERROR: %s\n" % msg)
+ sys.exit(1)
+
+class GeneratorBase:
+
+ #
+ # Derived classes should define a class attribute named _extension_map.
+ # This attribute should be a dictionary of the form:
+ # { (target-type, file-type): file-extension ...}
+ #
+ # where: target-type is 'exe', 'lib', ...
+ # file-type is 'target', 'object', ...
+ #
+
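A hypothetical example of such a map (illustrative values only, not taken from any particular derived generator):

    _extension_map = {
        ('exe', 'target'): '.exe',
        ('exe', 'object'): '.obj',
        ('lib', 'target'): '.dll',
        ('lib', 'object'): '.obj',
    }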
+ def __init__(self, fname, verfname, options=None):
+ # Retrieve major version from the C header, to avoid duplicating it in
+ # build.conf - it is required because some file names include it.
+ try:
+ vsn_parser = getversion.Parser()
+ vsn_parser.search('SVN_VER_MAJOR', 'libver')
+ self.version = vsn_parser.parse(verfname).libver
+ except:
+ raise GenError('Unable to extract version.')
+
+ # Read options
+ self.release_mode = None
+ for opt, val in options:
+ if opt == '--release':
+ self.release_mode = 1
+
+ # Now read and parse build.conf
+ parser = configparser.ConfigParser()
+ parser.read(fname)
+
+ self.conf = build_path(os.path.abspath(fname))
+
+ self.sections = { }
+ self.graph = DependencyGraph()
+
+ # Allow derived classes to suppress certain configuration sections
+ if not hasattr(self, 'skip_sections'):
+ self.skip_sections = { }
+
+ # The 'options' section does not represent a build target,
+ # it simply contains global options
+ self.skip_sections['options'] = None
+
+ # Read in the global options
+ self.includes = \
+ _collect_paths(parser.get('options', 'includes'))
+ self.private_includes = \
+ _collect_paths(parser.get('options', 'private-includes'))
+ self.private_built_includes = \
+ parser.get('options', 'private-built-includes').split()
+ self.scripts = \
+ _collect_paths(parser.get('options', 'test-scripts'))
+ self.bdb_scripts = \
+ _collect_paths(parser.get('options', 'bdb-test-scripts'))
+
+ self.include_wildcards = \
+ parser.get('options', 'include-wildcards').split()
+ self.swig_lang = parser.get('options', 'swig-languages').split()
+ self.swig_dirs = parser.get('options', 'swig-dirs').split()
+
+ # SWIG Generator
+ self.swig = generator.swig.Generator(self.conf, "swig")
+
+ # Visual C++ projects - contents are either TargetProject instances,
+ # or other targets with an external-project attribute.
+ self.projects = []
+
+ # Lists of pathnames of various kinds
+ self.test_deps = [] # Non-BDB dependent items to build for the tests
+ self.test_progs = [] # Subset of the above to actually execute
+ self.bdb_test_deps = [] # BDB-dependent items to build for the tests
+ self.bdb_test_progs = [] # Subset of the above to actually execute
+ self.target_dirs = [] # Directories in which files are built
+ self.manpages = [] # Manpages
+
+ # Collect the build targets and have a reproducible ordering
+ parser_sections = sorted(parser.sections())
+ for section_name in parser_sections:
+ if section_name in self.skip_sections:
+ continue
+
+ options = {}
+ for option in parser.options(section_name):
+ options[option] = parser.get(section_name, option)
+
+ type = options.get('type')
+
+ target_class = _build_types.get(type)
+ if not target_class:
+ raise GenError('ERROR: unknown build type for ' + section_name)
+
+ section = target_class.Section(target_class, section_name, options, self)
+
+ self.sections[section_name] = section
+
+ section.create_targets()
+
+ # Compute intra-library dependencies
+ for section in self.sections.values():
+ dependencies = (( DT_LINK, section.options.get('libs', "") ),
+ ( DT_NONLIB, section.options.get('nonlibs', "") ))
+
+ for dep_type, dep_names in dependencies:
+ # Translate string names to Section objects
+ dep_section_objects = []
+ for section_name in dep_names.split():
+ if section_name in self.sections:
+ dep_section_objects.append(self.sections[section_name])
+
+ # For each dep_section that this section declares a dependency on,
+ # take the targets of this section, and register a dependency on
+ # any 'matching' targets of the dep_section.
+ #
+ # At the moment, the concept of multiple targets per section is
+ # employed only for the SWIG modules, which have 1 target
+ # per language. Then, 'matching' means being of the same language.
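+ # For example, a 'swig' section's Python target ends up depending only on
+ # the Python target of any 'swig_lib' section it names in 'libs' (see
+ # TargetSWIGLib.Section.get_dep_targets below).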
+ for dep_section in dep_section_objects:
+ for target in section.get_targets():
+ self.graph.bulk_add(dep_type, target.name,
+ dep_section.get_dep_targets(target))
+
+ def compute_hdrs(self):
+ """Get a list of the header files"""
+ all_includes = list(map(native_path, self.includes + self.private_includes))
+ for d in unique(self.target_dirs):
+ for wildcard in self.include_wildcards:
+ hdrs = glob.glob(os.path.join(native_path(d), wildcard))
+ all_includes.extend(hdrs)
+ return all_includes
+
+ def compute_hdr_deps(self):
+ """Compute the dependencies of each header file"""
+
+ include_deps = IncludeDependencyInfo(self.compute_hdrs(),
+ list(map(native_path, self.private_built_includes)))
+
+ for objectfile, sources in self.graph.get_deps(DT_OBJECT):
+ assert len(sources) == 1
+ source = sources[0]
+
+ # Generated .c files must depend on all headers their parent .i file
+ # includes
+ if isinstance(objectfile, SWIGObject):
+ swigsources = self.graph.get_sources(DT_SWIG_C, source)
+ assert len(swigsources) == 1
+ ifile = swigsources[0]
+ assert isinstance(ifile, SWIGSource)
+
+ c_includes, swig_includes = \
+ include_deps.query_swig(native_path(ifile.filename))
+ for include_file in c_includes:
+ self.graph.add(DT_OBJECT, objectfile, build_path(include_file))
+ for include_file in swig_includes:
+ self.graph.add(DT_SWIG_C, source, build_path(include_file))
+
+ # Any non-swig C/C++ object must depend on the headers its parent
+ # .c or .cpp includes. Note that 'object' includes gettext .mo files,
+ # Java .class files, and .h files generated from Java classes, so
+ # we must filter here.
+ elif isinstance(source, SourceFile) and \
+ os.path.splitext(source.filename)[1] in ('.c', '.cpp'):
+ for include_file in include_deps.query(native_path(source.filename)):
+ self.graph.add(DT_OBJECT, objectfile, build_path(include_file))
+
+ def write_sqlite_headers(self):
+ "Transform sql files into header files"
+
+ import transform_sql
+ for hdrfile, sqlfile in self.graph.get_deps(DT_SQLHDR):
+ new_hdrfile = hdrfile + ".new"
+ new_file = open(new_hdrfile, 'w')
+ transform_sql.main(sqlfile[0], new_file)
+ new_file.close()
+
+ def identical(file1, file2):
+ try:
+ return filecmp.cmp(file1, file2)
+ except OSError:
+ return False
+
+ if identical(new_hdrfile, hdrfile):
+ os.remove(new_hdrfile)
+ else:
+ try:
+ os.remove(hdrfile)
+ except: pass
+ os.rename(new_hdrfile, hdrfile)
+
+
+class DependencyGraph:
+ """Record dependencies between build items.
+
+ See the DT_* values for the different dependency types. For each type,
+ the target and source objects recorded will be different. They could
+ be file names, Target objects, install types, etc.
+ """
+
+ def __init__(self):
+ self.deps = { } # type -> { target -> [ source ... ] }
+ for dt in dep_types:
+ self.deps[dt] = { }
+
+ def add(self, type, target, source):
+ if target in self.deps[type]:
+ self.deps[type][target].append(source)
+ else:
+ self.deps[type][target] = [ source ]
+
+ def bulk_add(self, type, target, sources):
+ if target in self.deps[type]:
+ self.deps[type][target].extend(sources)
+ else:
+ self.deps[type][target] = sources[:]
+
+ def get_sources(self, type, target, cls=None):
+ sources = self.deps[type].get(target, [ ])
+ if not cls:
+ return sources
+ filtered = [ ]
+ for src in sources:
+ if isinstance(src, cls):
+ filtered.append(src)
+ return filtered
+
+ def get_all_sources(self, type):
+ sources = [ ]
+ for group in self.deps[type].values():
+ sources.extend(group)
+ return sources
+
+ def get_deps(self, type):
+ return list(self.deps[type].items())
+
+# dependency types
+dep_types = [
+ 'DT_INSTALL', # install areas. e.g. 'lib', 'base-lib'
+ 'DT_OBJECT', # an object filename, depending upon .c filenames
+ 'DT_SWIG_C', # a swig-generated .c file, depending upon .i filename(s)
+ 'DT_LINK', # a libtool-linked filename, depending upon object fnames
+ 'DT_NONLIB', # filename depends on object fnames, but isn't linked to them
+ 'DT_SQLHDR', # header generated from a .sql file
+ ]
+
+# create some variables for these
+for _dt in dep_types:
+ # e.g. DT_INSTALL = 'DT_INSTALL'
+ globals()[_dt] = _dt
+
+class DependencyNode:
+ def __init__(self, filename):
+ self.filename = filename
+
+ def __str__(self):
+ return self.filename
+
+class ObjectFile(DependencyNode):
+ def __init__(self, filename, compile_cmd = None):
+ DependencyNode.__init__(self, filename)
+ self.compile_cmd = compile_cmd
+ self.source_generated = 0
+
+class SWIGObject(ObjectFile):
+ def __init__(self, filename, lang):
+ ObjectFile.__init__(self, filename)
+ self.lang = lang
+ self.lang_abbrev = lang_abbrev[lang]
+ self.source_generated = 1
+ ### hmm. this is Makefile-specific
+ self.compile_cmd = '$(COMPILE_%s_WRAPPER)' % self.lang_abbrev.upper()
+
+class HeaderFile(DependencyNode):
+ def __init__(self, filename, classname = None, compile_cmd = None):
+ DependencyNode.__init__(self, filename)
+ self.classname = classname
+ self.compile_cmd = compile_cmd
+
+class SourceFile(DependencyNode):
+ def __init__(self, filename, reldir):
+ DependencyNode.__init__(self, filename)
+ self.reldir = reldir
+
+class SWIGSource(SourceFile):
+ def __init__(self, filename):
+ SourceFile.__init__(self, filename, build_path_dirname(filename))
+
+
+lang_abbrev = {
+ 'python' : 'py',
+ 'perl' : 'pl',
+ 'ruby' : 'rb',
+ }
+
+lang_full_name = {
+ 'python' : 'Python',
+ 'perl' : 'Perl',
+ 'ruby' : 'Ruby',
+ }
+
+lang_utillib_suffix = {
+ 'python' : 'py',
+ 'perl' : 'perl',
+ 'ruby' : 'ruby',
+ }
+
+class Target(DependencyNode):
+ "A build target is a node in our dependency graph."
+
+ def __init__(self, name, options, gen_obj):
+ self.name = name
+ self.gen_obj = gen_obj
+ self.desc = options.get('description')
+ self.path = options.get('path', '')
+ self.add_deps = options.get('add-deps', '')
+ self.add_install_deps = options.get('add-install-deps', '')
+ self.msvc_name = options.get('msvc-name') # override project name
+
+ def add_dependencies(self):
+ # subclasses should override to provide behavior, as appropriate
+ raise NotImplementedError
+
+ class Section:
+ """Represents an individual section of build.conf
+
+ The Section class is sort of a factory class which is responsible for
+ creating and keeping track of Target instances associated with a section
+ of the configuration file. By default it only allows one Target per
+ section, but subclasses may create multiple Targets.
+ """
+
+ def __init__(self, target_class, name, options, gen_obj):
+ self.target_class = target_class
+ self.name = name
+ self.options = options
+ self.gen_obj = gen_obj
+
+ def create_targets(self):
+ """Create target instances"""
+ self.target = self.target_class(self.name, self.options, self.gen_obj)
+ self.target.add_dependencies()
+
+ def get_targets(self):
+ """Return list of target instances associated with this section"""
+ return [self.target]
+
+ def get_dep_targets(self, target):
+ """Return list of targets from this section that "target" depends on"""
+ return [self.target]
+
+class TargetLinked(Target):
+ "The target is linked (by libtool) against other libraries."
+
+ def __init__(self, name, options, gen_obj):
+ Target.__init__(self, name, options, gen_obj)
+ self.install = options.get('install')
+ self.compile_cmd = options.get('compile-cmd')
+ self.sources = options.get('sources', '*.c *.cpp')
+ self.link_cmd = options.get('link-cmd', '$(LINK)')
+
+ self.external_lib = options.get('external-lib')
+ self.external_project = options.get('external-project')
+ self.msvc_libs = options.get('msvc-libs', '').split()
+
+ def add_dependencies(self):
+ if self.external_lib or self.external_project:
+ if self.external_project:
+ self.gen_obj.projects.append(self)
+ return
+
+ # the specified install area depends upon this target
+ self.gen_obj.graph.add(DT_INSTALL, self.install, self)
+
+ sources = sorted(_collect_paths(self.sources or '*.c *.cpp', self.path))
+
+ for srcs, reldir in sources:
+ for src in srcs.split(" "):
+ if glob.glob(src):
+ if src[-2:] == '.c':
+ objname = src[:-2] + self.objext
+ elif src[-4:] == '.cpp':
+ objname = src[:-4] + self.objext
+ else:
+ raise GenError('ERROR: unknown file extension on ' + src)
+
+ ofile = ObjectFile(objname, self.compile_cmd)
+
+ # object depends upon source
+ self.gen_obj.graph.add(DT_OBJECT, ofile, SourceFile(src, reldir))
+
+ # target (a linked item) depends upon object
+ self.gen_obj.graph.add(DT_LINK, self.name, ofile)
+
+ # collect all the paths where stuff might get built
+ ### we should collect this from the dependency nodes rather than
+ ### the sources. "what dir are you going to put yourself into?"
+ self.gen_obj.target_dirs.append(self.path)
+ for pattern in self.sources.split():
+ dirname = build_path_dirname(pattern)
+ if dirname:
+ self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))
+
+class TargetExe(TargetLinked):
+ def __init__(self, name, options, gen_obj):
+ TargetLinked.__init__(self, name, options, gen_obj)
+
+ if not (self.external_lib or self.external_project):
+ extmap = self.gen_obj._extension_map
+ self.objext = extmap['exe', 'object']
+ self.filename = build_path_join(self.path, name + extmap['exe', 'target'])
+
+ self.manpages = options.get('manpages', '')
+ self.testing = options.get('testing')
+
+ def add_dependencies(self):
+ TargetLinked.add_dependencies(self)
+
+ # collect test programs
+ if self.install == 'test':
+ self.gen_obj.test_deps.append(self.filename)
+ if self.testing != 'skip':
+ self.gen_obj.test_progs.append(self.filename)
+ elif self.install == 'bdb-test':
+ self.gen_obj.bdb_test_deps.append(self.filename)
+ if self.testing != 'skip':
+ self.gen_obj.bdb_test_progs.append(self.filename)
+
+ self.gen_obj.manpages.extend(self.manpages.split())
+
+class TargetScript(Target):
+ def add_dependencies(self):
+ # we don't need to "compile" the sources, so there are no dependencies
+ # to add here, except to get the script installed in the proper area.
+ # note that the script might itself be generated, but that isn't a
+ # concern here.
+ self.gen_obj.graph.add(DT_INSTALL, self.install, self)
+
+class TargetLib(TargetLinked):
+ def __init__(self, name, options, gen_obj):
+ TargetLinked.__init__(self, name, options, gen_obj)
+
+ if not (self.external_lib or self.external_project):
+ extmap = gen_obj._extension_map
+ self.objext = extmap['lib', 'object']
+
+ # the target file is the name, version, and appropriate extension
+ tfile = '%s-%s%s' % (name, gen_obj.version, extmap['lib', 'target'])
+ self.filename = build_path_join(self.path, tfile)
+
+ # Is a library referencing symbols which are undefined at link time.
+ self.undefined_lib_symbols = options.get('undefined-lib-symbols') == 'yes'
+
+ self.link_cmd = options.get('link-cmd', '$(LINK_LIB)')
+
+ self.msvc_static = options.get('msvc-static') == 'yes' # is a static lib
+ self.msvc_fake = options.get('msvc-fake') == 'yes' # has fake target
+ self.msvc_export = options.get('msvc-export', '').split()
+
+class TargetApacheMod(TargetLib):
+
+ def __init__(self, name, options, gen_obj):
+ TargetLib.__init__(self, name, options, gen_obj)
+
+ tfile = name + self.gen_obj._extension_map['lib', 'target']
+ self.filename = build_path_join(self.path, tfile)
+
+ # we have a custom linking rule
+ ### hmm. this is Makefile-specific
+ self.compile_cmd = '$(COMPILE_APACHE_MOD)'
+ self.link_cmd = '$(LINK_APACHE_MOD)'
+
+class TargetRaModule(TargetLib):
+ pass
+
+class TargetFsModule(TargetLib):
+ pass
+
+class TargetDoc(Target):
+ pass
+
+class TargetI18N(Target):
+ "The target is a collection of .po files to be compiled by msgfmt."
+
+ def __init__(self, name, options, gen_obj):
+ Target.__init__(self, name, options, gen_obj)
+ self.install = options.get('install')
+ self.sources = options.get('sources')
+ # Let the Makefile determine this via .SUFFIXES
+ self.compile_cmd = None
+ self.objext = '.mo'
+ self.external_project = options.get('external-project')
+
+ def add_dependencies(self):
+ self.gen_obj.graph.add(DT_INSTALL, self.install, self)
+
+ sources = sorted(_collect_paths(self.sources or '*.po', self.path))
+
+ for src, reldir in sources:
+ if src[-3:] == '.po':
+ objname = src[:-3] + self.objext
+ else:
+ raise GenError('ERROR: unknown file extension on ' + src)
+
+ ofile = ObjectFile(objname, self.compile_cmd)
+
+ # object depends upon source
+ self.gen_obj.graph.add(DT_OBJECT, ofile, SourceFile(src, reldir))
+
+ # target depends upon object
+ self.gen_obj.graph.add(DT_LINK, self.name, ofile)
+
+ # Add us to the list of target dirs, so we're created in mkdir-init.
+ self.gen_obj.target_dirs.append(self.path)
+
+class TargetSWIG(TargetLib):
+ def __init__(self, name, options, gen_obj, lang):
+ TargetLib.__init__(self, name, options, gen_obj)
+ self.lang = lang
+ self.desc = self.desc + ' for ' + lang_full_name[lang]
+ self.include_runtime = options.get('include-runtime') == 'yes'
+
+ ### hmm. this is Makefile-specific
+ self.link_cmd = '$(LINK_%s_WRAPPER)' % lang_abbrev[lang].upper()
+
+ def add_dependencies(self):
+ # Look in source directory for dependencies
+ self.gen_obj.target_dirs.append(self.path)
+
+ sources = _collect_paths(self.sources, self.path)
+ assert len(sources) == 1 ### simple assertions for now
+
+ # get path to SWIG .i file
+ ipath = sources[0][0]
+ iname = build_path_basename(ipath)
+
+ assert iname[-2:] == '.i'
+ cname = iname[:-2] + '.c'
+ oname = iname[:-2] + self.gen_obj._extension_map['pyd', 'object']
+
+ # Extract SWIG module name from .i file name
+ module_name = iname[:4] != 'svn_' and iname[:-2] or iname[4:-2]
+
+ lib_extension = self.gen_obj._extension_map['lib', 'target']
+ if self.lang == "ruby":
+ lib_filename = module_name + lib_extension
+ elif self.lang == "perl":
+ lib_filename = '_' + module_name.capitalize() + lib_extension
+ else:
+ lib_extension = self.gen_obj._extension_map['pyd', 'target']
+ lib_filename = '_' + module_name + lib_extension
+
+ self.name = self.lang + '_' + module_name
+ self.path = build_path_join(self.path, self.lang)
+ if self.lang == "perl":
+ self.path = build_path_join(self.path, "native")
+ self.filename = build_path_join(self.path, lib_filename)
+
+ ifile = SWIGSource(ipath)
+ cfile = SWIGObject(build_path_join(self.path, cname), self.lang)
+ ofile = SWIGObject(build_path_join(self.path, oname), self.lang)
+
+ # the .c file depends upon the .i file
+ self.gen_obj.graph.add(DT_SWIG_C, cfile, ifile)
+
+ # the object depends upon the .c file
+ self.gen_obj.graph.add(DT_OBJECT, ofile, cfile)
+
+ # the library depends upon the object
+ self.gen_obj.graph.add(DT_LINK, self.name, ofile)
+
+ # the specified install area depends upon the library
+ self.gen_obj.graph.add(DT_INSTALL, 'swig-' + lang_abbrev[self.lang], self)
+
+ class Section(TargetLib.Section):
+ def create_targets(self):
+ self.targets = { }
+ for lang in self.gen_obj.swig_lang:
+ target = self.target_class(self.name, self.options, self.gen_obj, lang)
+ target.add_dependencies()
+ self.targets[lang] = target
+
+ def get_targets(self):
+ return list(self.targets.values())
+
+ def get_dep_targets(self, target):
+ target = self.targets.get(target.lang, None)
+ return target and [target] or [ ]
+
+class TargetSWIGLib(TargetLib):
+ def __init__(self, name, options, gen_obj):
+ TargetLib.__init__(self, name, options, gen_obj)
+ self.lang = options.get('lang')
+
+ class Section(TargetLib.Section):
+ def get_dep_targets(self, target):
+ if target.lang == self.target.lang:
+ return [ self.target ]
+ return [ ]
+
+class TargetProject(Target):
+ def __init__(self, name, options, gen_obj):
+ Target.__init__(self, name, options, gen_obj)
+ self.cmd = options.get('cmd')
+ self.release = options.get('release')
+ self.debug = options.get('debug')
+
+ def add_dependencies(self):
+ self.gen_obj.projects.append(self)
+
+class TargetSWIGProject(TargetProject):
+ def __init__(self, name, options, gen_obj):
+ TargetProject.__init__(self, name, options, gen_obj)
+ self.lang = options.get('lang')
+
+class TargetJava(TargetLinked):
+ def __init__(self, name, options, gen_obj):
+ TargetLinked.__init__(self, name, options, gen_obj)
+ self.link_cmd = options.get('link-cmd')
+ self.packages = options.get('package-roots', '').split()
+ self.jar = options.get('jar')
+ self.deps = [ ]
+
+class TargetJavaHeaders(TargetJava):
+ def __init__(self, name, options, gen_obj):
+ TargetJava.__init__(self, name, options, gen_obj)
+ self.objext = '.class'
+ self.javah_objext = '.h'
+ self.headers = options.get('headers')
+ self.classes = options.get('classes')
+ self.package = options.get('package')
+ self.output_dir = self.headers
+
+ def add_dependencies(self):
+ sources = _collect_paths(self.sources, self.path)
+
+ for src, reldir in sources:
+ if src[-5:] != '.java':
+ raise GenError('ERROR: unknown file extension on ' + src)
+
+ class_name = build_path_basename(src[:-5])
+
+ class_header = build_path_join(self.headers, class_name + '.h')
+ class_header_win = build_path_join(self.headers,
+ self.package.replace(".", "_")
+ + "_" + class_name + '.h')
+ class_pkg_list = self.package.split('.')
+ class_pkg = build_path_join(*class_pkg_list)
+ class_file = ObjectFile(build_path_join(self.classes, class_pkg,
+ class_name + self.objext))
+ class_file.source_generated = 1
+ class_file.class_name = class_name
+ hfile = HeaderFile(class_header, self.package + '.' + class_name,
+ self.compile_cmd)
+ hfile.filename_win = class_header_win
+ hfile.source_generated = 1
+ self.gen_obj.graph.add(DT_OBJECT, hfile, class_file)
+ self.deps.append(hfile)
+
+ # target (a linked item) depends upon object
+ self.gen_obj.graph.add(DT_LINK, self.name, hfile)
+
+
+ # collect all the paths where stuff might get built
+ ### we should collect this from the dependency nodes rather than
+ ### the sources. "what dir are you going to put yourself into?"
+ self.gen_obj.target_dirs.append(self.path)
+ self.gen_obj.target_dirs.append(self.classes)
+ self.gen_obj.target_dirs.append(self.headers)
+ for pattern in self.sources.split():
+ dirname = build_path_dirname(pattern)
+ if dirname:
+ self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))
+
+ self.gen_obj.graph.add(DT_INSTALL, self.name, self)
+
+class TargetJavaClasses(TargetJava):
+ def __init__(self, name, options, gen_obj):
+ TargetJava.__init__(self, name, options, gen_obj)
+ self.objext = '.class'
+ self.lang = 'java'
+ self.classes = options.get('classes')
+ self.output_dir = self.classes
+
+ def add_dependencies(self):
+ sources = []
+ for p in self.path.split():
+ sources.extend(_collect_paths(self.sources, p))
+
+ for src, reldir in sources:
+ if src[-5:] == '.java':
+ objname = src[:-5] + self.objext
+
+ # As .class files are likely not generated into the same
+ # directory as the source files, the object path may need
+ # adjustment. To that end, take self.classes into
+ # account.
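+ # (Hypothetical illustration: with 'org' listed in package-roots, an
+ # objname of 'x/src/org/apache/subversion/javahl/Foo.class' would be
+ # re-rooted as '<classes>/org/apache/subversion/javahl/Foo.class'.)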
+ dirs = build_path_split(objname)
+ sourcedirs = dirs[:-1] # Last element is the .class file name.
+ while sourcedirs:
+ if sourcedirs.pop() in self.packages:
+ sourcepath = build_path_join(*sourcedirs)
+ objname = build_path_join(self.classes, *dirs[len(sourcedirs):])
+ break
+ else:
+ raise GenError('Unable to find Java package root in path "%s"' % objname)
+ else:
+ raise GenError('ERROR: unknown file extension on "' + src + '"')
+
+ ofile = ObjectFile(objname, self.compile_cmd)
+ sfile = SourceFile(src, reldir)
+ sfile.sourcepath = sourcepath
+
+ # object depends upon source
+ self.gen_obj.graph.add(DT_OBJECT, ofile, sfile)
+
+ # target (a linked item) depends upon object
+ self.gen_obj.graph.add(DT_LINK, self.name, ofile)
+
+ # Add the class file to the dependency tree for this target
+ self.deps.append(ofile)
+
+ # collect all the paths where stuff might get built
+ ### we should collect this from the dependency nodes rather than
+ ### the sources. "what dir are you going to put yourself into?"
+ self.gen_obj.target_dirs.extend(self.path.split())
+ self.gen_obj.target_dirs.append(self.classes)
+ for pattern in self.sources.split():
+ dirname = build_path_dirname(pattern)
+ if dirname:
+ self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))
+
+ self.gen_obj.graph.add(DT_INSTALL, self.name, self)
+
+class TargetSQLHeader(Target):
+ def __init__(self, name, options, gen_obj):
+ Target.__init__(self, name, options, gen_obj)
+ self.sources = options.get('sources')
+
+ _re_sql_include = re.compile('-- *include: *([-a-z]+)')
+ def add_dependencies(self):
+
+ sources = _collect_paths(self.sources, self.path)
+ assert len(sources) == 1 # support for just one source, for now
+
+ source, reldir = sources[0]
+ assert reldir == '' # no support for reldir right now
+ assert source.endswith('.sql')
+
+ output = source[:-4] + '.h'
+
+ self.gen_obj.graph.add(DT_SQLHDR, output, source)
+
+ for line in fileinput.input(source):
+ match = self._re_sql_include.match(line)
+ if not match:
+ continue
+ file = match.group(1)
+ self.gen_obj.graph.add(DT_SQLHDR, output,
+ os.path.join(os.path.dirname(source), file + '.sql'))
+
+_build_types = {
+ 'exe' : TargetExe,
+ 'script' : TargetScript,
+ 'lib' : TargetLib,
+ 'doc' : TargetDoc,
+ 'swig' : TargetSWIG,
+ 'project' : TargetProject,
+ 'swig_lib' : TargetSWIGLib,
+ 'swig_project' : TargetSWIGProject,
+ 'ra-module': TargetRaModule,
+ 'fs-module': TargetFsModule,
+ 'apache-mod': TargetApacheMod,
+ 'javah' : TargetJavaHeaders,
+ 'java' : TargetJavaClasses,
+ 'i18n' : TargetI18N,
+ 'sql-header' : TargetSQLHeader,
+ }
+
+
+class GenError(Exception):
+ pass
+
+
+# Path Handling Functions
+#
+# Build paths specified in build.conf are assumed to be always separated
+# by forward slashes, regardless of the current running os.
+#
+# Native paths are paths separated by os.sep.
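+#
+# For example, on Windows (where os.sep is '\') native_path('a/b/c.h')
+# yields 'a\b\c.h', while build_path('a\b\c.h') yields 'a/b/c.h'.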
+
+def native_path(path):
+ """Convert a build path to a native path"""
+ return path.replace('/', os.sep)
+
+def build_path(path):
+ """Convert a native path to a build path"""
+ path = path.replace(os.sep, '/')
+ if os.altsep:
+ path = path.replace(os.altsep, '/')
+ return path
+
+def build_path_join(*path_parts):
+ """Join path components into a build path"""
+ return '/'.join(path_parts)
+
+def build_path_split(path):
+ """Return list of components in a build path"""
+ return path.split('/')
+
+def build_path_splitfile(path):
+ """Return the filename and directory portions of a file path"""
+ pos = path.rfind('/')
+ if pos > 0:
+ return path[:pos], path[pos+1:]
+ elif pos == 0:
+ return path[0], path[1:]
+ else:
+ return "", path
+
+def build_path_dirname(path):
+ """Return the directory portion of a file path"""
+ return build_path_splitfile(path)[0]
+
+def build_path_basename(path):
+ """Return the filename portion of a file path"""
+ return build_path_splitfile(path)[1]
+
+def build_path_retreat(path):
+ "Given a relative directory, return ../ paths to retreat to the origin."
+ return ".." + "/.." * path.count('/')
+
+def build_path_strip(path, files):
+ "Strip the given path from each file."
+ l = len(path)
+ result = [ ]
+ for file in files:
+ if len(file) > l and file[:l] == path and file[l] == '/':
+ result.append(file[l+1:])
+ else:
+ result.append(file)
+ return result
+
+def _collect_paths(pats, path=None):
+ """Find files matching a space separated list of globs
+
+ pats (string) is the list of glob patterns
+
+ path (string), if specified, is a path that will be prepended to each
+ glob pattern before it is evaluated
+
+ If path is none the return value is a list of filenames, otherwise
+ the return value is a list of 2-tuples. The first element in each tuple
+ is a matching filename and the second element is the portion of the
+ glob pattern which matched the file before its last forward slash (/)
+ """
+ result = [ ]
+ for base_pat in pats.split():
+ if path:
+ pattern = build_path_join(path, base_pat)
+ else:
+ pattern = base_pat
+ files = sorted(glob.glob(native_path(pattern))) or [pattern]
+
+ if path is None:
+ # just append the names to the result list
+ for file in files:
+ result.append(build_path(file))
+ else:
+ # if we have paths, then we need to record how each source is located
+ # relative to the specified path
+ reldir = build_path_dirname(base_pat)
+ for file in files:
+ result.append((build_path(file), reldir))
+
+ return result
+
+_re_public_include = re.compile(r'^subversion/include/(\w+)\.h$')
+def _is_public_include(fname):
+ return _re_public_include.match(build_path(fname))
+
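+# Map a public header to its generated SWIG proxy wrapper, e.g.
+# 'subversion/include/svn_types.h' becomes (in native-path form)
+# 'subversion/bindings/swig/proxy/svn_types_h.swg'.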
+def _swig_include_wrapper(fname):
+ return native_path(_re_public_include.sub(
+ r"subversion/bindings/swig/proxy/\1_h.swg", build_path(fname)))
+
+def _path_endswith(path, subpath):
+ """Check if SUBPATH is a true path suffix of PATH.
+ """
+ path_len = len(path)
+ subpath_len = len(subpath)
+
+ return (subpath_len > 0 and path_len >= subpath_len
+ and path[-subpath_len:] == subpath
+ and (path_len == subpath_len
+ or (subpath[0] == os.sep and path[-subpath_len] == os.sep)
+ or path[-subpath_len - 1] == os.sep))
+
+class IncludeDependencyInfo:
+ """Finds all dependencies between a named set of headers, and computes
+ closure, so that individual C and SWIG source files can then be scanned, and
+ the stored dependency data used to return all directly and indirectly
+ referenced headers.
+
+ Note that where SWIG is concerned, there are two different kinds of include:
+ (1) those that include files in SWIG processing, and so matter to the
+ generation of .c files. (These are %include, %import).
+ (2) those that include references to C headers in the generated output,
+ and so are not required at .c generation, only at .o generation.
+ (These are %{ #include ... %}).
+
+ This class works exclusively in native-style paths."""
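+ # In terms of the docstring above, a line such as '%include svn_types.swg'
+ # is an include of kind (1), while a '#include "svn_types.h"' inside a
+ # '%{ ... %}' block is of kind (2); _scan_for_includes keys on the leading
+ # '%' or '#' to distinguish the two.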
+
+ def __init__(self, filenames, fnames_nonexist):
+ """Operation of an IncludeDependencyInfo instance is restricted to a
+ 'domain' - a set of header files which are considered interesting when
+ following and reporting dependencies. This is done to avoid creating any
+ dependencies on system header files. The domain is defined by three
+ factors:
+ (1) FILENAMES is a list of headers which are in the domain, and should be
+ scanned to discover how they inter-relate.
+ (2) FNAMES_NONEXIST is a list of headers which are in the domain, but will
+ be created by the build process, and so are not available to be
+ scanned - they will be assumed not to depend on any other interesting
+ headers.
+ (3) Files in subversion/bindings/swig/proxy/, which are autogenerated
+ based on files in subversion/include/, will be added to
+ the domain when a file in subversion/include/ is processed, and
+ dependencies will be deduced by special-case logic.
+ """
+
+ # This defines the domain (i.e. set of files) in which dependencies are
+ # being located. Its structure is:
+ # { 'basename.h': [ 'path/to/something/named/basename.h',
+ # 'path/to/another/named/basename.h', ] }
+ self._domain = {}
+ for fname in filenames + fnames_nonexist:
+ bname = os.path.basename(fname)
+ self._domain.setdefault(bname, []).append(fname)
+ if _is_public_include(fname):
+ swig_fname = _swig_include_wrapper(fname)
+ swig_bname = os.path.basename(swig_fname)
+ self._domain.setdefault(swig_bname, []).append(swig_fname)
+
+ # This data structure is:
+ # { 'full/path/to/header.h': { 'full/path/to/dependency.h': TYPECODE, } }
+ # TYPECODE is '#', denoting a C include, or '%' denoting a SWIG include.
+ self._deps = {}
+ for fname in filenames:
+ self._deps[fname] = self._scan_for_includes(fname)
+ if _is_public_include(fname):
+ hdrs = { self._domain["proxy.swg"][0]: '%',
+ self._domain["apr.swg"][0]: '%',
+ fname: '%' }
+ for h in self._deps[fname].keys():
+ if (_is_public_include(h)
+ or h == os.path.join('subversion', 'include', 'private',
+ 'svn_debug.h')):
+ hdrs[_swig_include_wrapper(h)] = '%'
+ else:
+ raise RuntimeError("Public include '%s' depends on '%s', " \
+ "which is not a public include! What's going on?" % (fname, h))
+ swig_fname = _swig_include_wrapper(fname)
+ swig_bname = os.path.basename(swig_fname)
+ self._deps[swig_fname] = hdrs
+ for fname in fnames_nonexist:
+ self._deps[fname] = {}
+
+ # Keep recomputing closures until we see no more changes
+ while True:
+ changes = 0
+ for fname in self._deps.keys():
+ changes = self._include_closure(self._deps[fname]) or changes
+ if not changes:
+ break
+
+ def query_swig(self, fname):
+ """Scan the C or SWIG file FNAME, and return the full paths of each
+ include file that is a direct or indirect dependency, as a 2-tuple:
+ (C_INCLUDES, SWIG_INCLUDES)."""
+ if fname in self._deps:
+ hdrs = self._deps[fname]
+ else:
+ hdrs = self._scan_for_includes(fname)
+ self._include_closure(hdrs)
+ c_filenames = []
+ swig_filenames = []
+ for hdr, hdr_type in hdrs.items():
+ if hdr_type == '#':
+ c_filenames.append(hdr)
+ else: # hdr_type == '%'
+ swig_filenames.append(hdr)
+ # Be independent of hash ordering
+ c_filenames.sort()
+ swig_filenames.sort()
+ return (c_filenames, swig_filenames)
+
+ def query(self, fname):
+ """Same as SELF.QUERY_SWIG(FNAME), but assert that there are no SWIG
+ includes, and return only C includes as a single list."""
+ c_includes, swig_includes = self.query_swig(fname)
+ assert len(swig_includes) == 0
+ return c_includes
+
+ def _include_closure(self, hdrs):
+ """Mutate the passed dictionary HDRS, by performing a single pass
+ through the listed headers, adding the headers on which the first group
+ of headers depend, if not already present.
+
+ HDRS is of the form { 'path/to/header.h': TYPECODE, }
+
+ Return a boolean indicating whether any changes were made."""
+ items = list(hdrs.items())
+ for this_hdr, this_type in items:
+ for dependency_hdr, dependency_type in self._deps[this_hdr].items():
+ self._upd_dep_hash(hdrs, dependency_hdr, dependency_type)
+ return (len(items) != len(hdrs))
+
+ def _upd_dep_hash(self, hash, hdr, type):
+ """Mutate HASH (a data structure of the form
+ { 'path/to/header.h': TYPECODE, } ) to include additional info of a
+ dependency of type TYPE on the file HDR."""
+ # '%' (SWIG, .c: .i) has precedence over '#' (C, .o: .c)
+ if hash.get(hdr) != '%':
+ hash[hdr] = type
+
+ _re_include = \
+ re.compile(r'^\s*([#%])\s*(?:include|import)\s*([<"])?([^<">;\s]+)')
+ def _scan_for_includes(self, fname):
+ """Scan C source file FNAME and return the basenames of any headers
+ which are directly included, and within the set defined when this
+ IncludeDependencyProcessor was initialized.
+
+ Return a dictionary with included full file names as keys and None as
+ values."""
+ hdrs = { }
+ for line in fileinput.input(fname):
+ match = self._re_include.match(line)
+ if not match:
+ continue
+ include_param = native_path(match.group(3))
+ type_code = match.group(1)
+ direct_possibility_fname = os.path.normpath(os.path.join(
+ os.path.dirname(fname), include_param))
+ domain_fnames = self._domain.get(os.path.basename(include_param), [])
+ if direct_possibility_fname in domain_fnames:
+ self._upd_dep_hash(hdrs, direct_possibility_fname, type_code)
+ elif (len(domain_fnames) == 1
+ and (include_param.find(os.sep) == -1
+ or _path_endswith(domain_fnames[0], include_param))):
+ self._upd_dep_hash(hdrs, domain_fnames[0], type_code)
+ else:
+ # None found
+ if include_param.find(os.sep) == -1 and len(domain_fnames) > 1:
+ _error(
+ "Unable to determine which file is being included\n"
+ " Include Parameter: '%s'\n"
+ " Including File: '%s'\n"
+ " Direct possibility: '%s'\n"
+ " Other possibilities: %s\n"
+ % (include_param, fname, direct_possibility_fname,
+ domain_fnames))
+ if match.group(2) == '"':
+ _warning('"%s" header not found, file %s' % (include_param, fname))
+ continue
+ if match.group(2) == '<':
+ _warning('<%s> header *found*, file %s' % (include_param, fname))
+ # The above warnings help to avoid the following problems:
+ # - If a header uses the correct <> or "" convention, then the warnings
+ # reveal whether the build generator does/does not make dependencies for
+ # it when it should not/should - e.g. they might reveal changes needed
+ # to build.conf.
+ # ...and...
+ # - If the generator is correct, then the warnings reveal incorrect use
+ # of the <>/"" convention.
+ return hdrs
+
+
+def _sorted_files(graph, area):
+ "Given a list of targets, sort them based on their dependencies."
+
+ # we're going to just go with a naive algorithm here. these lists are
+ # going to be so short, that we can use O(n^2) or whatever this is.
+
+ inst_targets = graph.get_sources(DT_INSTALL, area)
+
+ # first we need our own copy of the target list since we're going to
+ # munge it.
+ targets = inst_targets[:]
+
+ # the output list of the targets' files
+ files = [ ]
+
+ # loop while we have targets remaining:
+ while targets:
+ # find a target that has no dependencies in our current targets list.
+ for t in targets:
+ s = graph.get_sources(DT_LINK, t.name, Target) \
+ + graph.get_sources(DT_NONLIB, t.name, Target)
+ for d in s:
+ if d in targets:
+ break
+ else:
+ # no dependencies found in the targets list. this is a good "base"
+ # to add to the files list now.
+ if isinstance(t, TargetJava):
+ # Java targets have no filename, and we just ignore them.
+ pass
+ elif isinstance(t, TargetI18N):
+ # I18N targets have no filename, we recurse one level deeper, and
+ # get the filenames of their dependencies.
+ s = graph.get_sources(DT_LINK, t.name)
+ for d in s:
+ if d not in targets:
+ files.append(d.filename)
+ else:
+ files.append(t.filename)
+
+ # don't consider this target any more
+ targets.remove(t)
+
+ # break out of search through targets
+ break
+ else:
+ # we went through the entire target list and everything had at least
+ # one dependency on another target. thus, we have a circular dependency
+ # tree. somebody messed up the .conf file, or the app truly does have
+ # a loop (and if so, they're screwed; libtool can't relink a lib at
+ # install time if the dependent libs haven't been installed yet)
+ raise CircularDependencies()
+
+ return files
+
+class CircularDependencies(Exception):
+ pass
+
+def unique(seq):
+ "Eliminate duplicates from a sequence"
+ result = [ ]
+ dupes = { }
+ for e in seq:
+ if e not in dupes:
+ dupes[e] = None
+ result.append(e)
+ return result
+
+### End of file.
diff --git a/build/generator/gen_make.py b/build/generator/gen_make.py
new file mode 100644
index 0000000..697f267
--- /dev/null
+++ b/build/generator/gen_make.py
@@ -0,0 +1,585 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# gen_make.py -- generate makefiles and dependencies
+#
+
+import os
+import stat
+import sys
+try:
+ # Python >=3.0
+ import configparser
+except ImportError:
+ # Python <3.0
+ import ConfigParser as configparser
+
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ from io import StringIO
+else:
+ # Python <3.0
+ try:
+ from cStringIO import StringIO
+ except ImportError:
+ from StringIO import StringIO
+
+import ezt
+
+import gen_base
+import generator.swig.header_wrappers
+import generator.swig.checkout_swig_header
+import generator.swig.external_runtime
+
+from gen_base import build_path_join, build_path_strip, build_path_splitfile, \
+ build_path_basename, build_path_dirname, build_path_retreat, unique
+
+
+class Generator(gen_base.GeneratorBase):
+
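+ # '.lo' and '.la' are the libtool object and library-archive suffixes;
+ # '$(EXEEXT)' is the executable suffix chosen by configure (empty on
+ # Unix, '.exe' on Windows).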
+ _extension_map = {
+ ('exe', 'target'): '$(EXEEXT)',
+ ('exe', 'object'): '.lo',
+ ('lib', 'target'): '.la',
+ ('lib', 'object'): '.lo',
+ ('pyd', 'target'): '.la',
+ ('pyd', 'object'): '.lo',
+ }
+
+ def __init__(self, fname, verfname, options=None):
+ gen_base.GeneratorBase.__init__(self, fname, verfname, options)
+ self.assume_shared_libs = False
+ if ('--assume-shared-libs', '') in options:
+ self.assume_shared_libs = True
+
+ def write(self):
+ install_deps = self.graph.get_deps(gen_base.DT_INSTALL)
+ install_sources = self.graph.get_all_sources(gen_base.DT_INSTALL)
+
+ cp = configparser.ConfigParser()
+ cp.read('gen-make.opts')
+ if cp.has_option('options', '--installed-libs'):
+ self.installed_libs = cp.get('options', '--installed-libs').split(',')
+ else:
+ self.installed_libs = []
+
+ # ensure consistency between runs
+ install_deps.sort()
+ install_sources.sort(key = lambda s: s.name)
+
+ class _eztdata(object):
+ def __init__(self, **kw):
+ vars(self).update(kw)
+
+ data = _eztdata(
+ modules=[ ],
+ swig_langs=[ ],
+ swig_c=[ ],
+ target=[ ],
+ itargets=[ ],
+ areas=[ ],
+ isources=[ ],
+ deps=[ ],
+ sql=[],
+ )
+
+ ########################################
+
+ for target in install_sources:
+ if isinstance(target, gen_base.TargetRaModule) or \
+ isinstance(target, gen_base.TargetFsModule):
+ # name of the module: strip 'libsvn_' and upper-case it
+ name = target.name[7:].upper()
+
+ # construct a list of the other .la libs to link against
+ retreat = build_path_retreat(target.path)
+ if target.name in self.installed_libs:
+ deps = []
+ link = [ '-l%s-%s' % (target.name[3:], self.version) ]
+ else:
+ deps = [ target.filename ]
+ link = [ build_path_join(retreat, target.filename) ]
+ for source in self.graph.get_sources(gen_base.DT_LINK, target.name):
+ if not isinstance(source, gen_base.TargetLib) or source.external_lib:
+ continue
+ elif source.name in self.installed_libs:
+ continue
+ deps.append(source.filename)
+ link.append(build_path_join(retreat, source.filename))
+
+ data.modules.append(_eztdata(name=name, deps=deps, link=link))
+
+ # write a list of directories in which things are built
+ # get all the test scripts' directories
+ script_dirs = list(map(build_path_dirname, self.scripts + self.bdb_scripts))
+
+ # remove duplicate directories between targets and tests
+ build_dirs = unique(self.target_dirs + script_dirs + self.swig_dirs)
+ data.build_dirs = build_dirs
+
+ # write lists of test files
+ # deps = all, progs = not including those marked "testing = skip"
+ data.bdb_test_deps = self.bdb_test_deps + self.bdb_scripts
+ data.bdb_test_progs = self.bdb_test_progs + self.bdb_scripts
+ data.test_deps = self.test_deps + self.scripts
+ data.test_progs = self.test_progs + self.scripts
+
+ # write list of all manpages
+ data.manpages = self.manpages
+
+ # write a list of files to remove during "make clean"
+ cfiles = [ ]
+ for target in install_sources:
+ # .la files are handled by the standard 'clean' rule; clean all the
+ # other targets
+ if not isinstance(target, gen_base.TargetScript) \
+ and not isinstance(target, gen_base.TargetProject) \
+ and not isinstance(target, gen_base.TargetI18N) \
+ and not isinstance(target, gen_base.TargetJava) \
+ and not target.external_lib \
+ and target.filename[-3:] != '.la':
+ cfiles.append(target.filename)
+ for script in self.scripts:
+ if script.endswith('.py'):
+ cfiles.append(script + 'c')
+ data.cfiles = sorted(cfiles)
+
+ # here are all the SQL files and their generated headers. the Makefile
+ # has an implicit rule for generating these, so there isn't much to do
+ # except to clean them out. we only do that for 'make extraclean' since
+ # these are included as part of the tarball. the files are transformed
+ # by gen-make, and developers also get a Make rule to keep them updated.
+ for hdrfile, sqlfile in sorted(self.graph.get_deps(gen_base.DT_SQLHDR),
+ key=lambda t: t[0]):
+ data.sql.append(_eztdata(header=hdrfile, source=sqlfile[0]))
+
+ data.release_mode = ezt.boolean(self.release_mode)
+
+ ########################################
+
+ if not self.release_mode:
+ swig_rules = StringIO()
+ for swig in (generator.swig.header_wrappers,
+ generator.swig.checkout_swig_header,
+ generator.swig.external_runtime):
+ gen = swig.Generator(self.conf, "swig")
+ gen.write_makefile_rules(swig_rules)
+
+ data.swig_rules = swig_rules.getvalue()
+
+ ########################################
+
+ # write dependencies and build rules for generated .c files
+ swig_c_deps = sorted(self.graph.get_deps(gen_base.DT_SWIG_C),
+ key=lambda t: t[0].filename)
+
+ swig_lang_deps = {}
+ for lang in self.swig.langs:
+ swig_lang_deps[lang] = []
+
+ for objname, sources in swig_c_deps:
+ swig_lang_deps[objname.lang].append(str(objname))
+
+ for lang in self.swig.langs:
+ data.swig_langs.append(_eztdata(short=self.swig.short[lang],
+ deps=swig_lang_deps[lang]))
+
+ ########################################
+
+ if not self.release_mode:
+ for objname, sources in swig_c_deps:
+ data.swig_c.append(_eztdata(c_file=str(objname),
+ deps=list(map(str, sources)),
+ opts=self.swig.opts[objname.lang],
+ source=str(sources[0])))
+
+ ########################################
+
+ for target_ob in install_sources:
+
+ if isinstance(target_ob, gen_base.TargetScript):
+ # there is nothing to build
+ continue
+
+ target = target_ob.name
+ if isinstance(target_ob, gen_base.TargetJava):
+ path = target_ob.output_dir
+ else:
+ path = target_ob.path
+
+ retreat = build_path_retreat(path)
+
+ # get the source items (.o and .la) for the link unit
+ objects = [ ]
+ object_srcs = [ ]
+ headers = [ ]
+ header_classes = [ ]
+ header_class_filenames = [ ]
+ deps = [ ]
+ libs = [ ]
+
+ for link_dep in self.graph.get_sources(gen_base.DT_LINK, target_ob.name):
+ if isinstance(link_dep, gen_base.TargetJava):
+ deps.append(link_dep.name)
+ elif isinstance(link_dep, gen_base.TargetLinked):
+ if link_dep.external_lib:
+ libs.append(link_dep.external_lib)
+ elif link_dep.external_project:
+ # FIXME: This is a temporary workaround to fix build breakage
+ # expeditiously. It is of questionable validity for a build
+ # node to have external_project but not have external_lib.
+ pass
+ elif link_dep.name in self.installed_libs:
+ libs.append('-l%s-%s' % (link_dep.name[3:], self.version))
+ else:
+ # append the output of the target to our stated dependencies
+ if not self.assume_shared_libs:
+ deps.append(link_dep.filename)
+
+ # link against the library
+ libs.append(build_path_join(retreat, link_dep.filename))
+ elif isinstance(link_dep, gen_base.ObjectFile):
+ # link in the object file
+ objects.append(link_dep.filename)
+ for dep in self.graph.get_sources(gen_base.DT_OBJECT, link_dep, gen_base.SourceFile):
+ object_srcs.append(
+ build_path_join('$(abs_srcdir)', dep.filename))
+ elif isinstance(link_dep, gen_base.HeaderFile):
+ # link in the header file
+ # N.B. that filename_win contains the '_'-escaped class name
+ headers.append(link_dep.filename_win)
+ header_classes.append(link_dep.classname)
+ for dep in self.graph.get_sources(gen_base.DT_OBJECT, link_dep, gen_base.ObjectFile):
+ header_class_filenames.append(dep.filename)
+ else:
+ ### we don't know what this is, so we don't know what to do with it
+ raise UnknownDependency
+
+ for nonlib in self.graph.get_sources(gen_base.DT_NONLIB, target_ob.name):
+ if isinstance(nonlib, gen_base.TargetLinked):
+ if not nonlib.external_lib:
+ deps.append(nonlib.filename)
+
+ targ_varname = target.replace('-', '_')
+ objnames = build_path_strip(path, objects)
+
+ ezt_target = _eztdata(name=target_ob.name,
+ varname=targ_varname,
+ path=path,
+ install=None,
+ add_deps=target_ob.add_deps,
+ objects=objects,
+ deps=deps,
+ )
+ data.target.append(ezt_target)
+
+ if hasattr(target_ob, 'link_cmd'):
+ ezt_target.link_cmd = target_ob.link_cmd
+ if hasattr(target_ob, 'output_dir'):
+ ezt_target.output_dir = target_ob.output_dir
+
+ # Add additional install dependencies if necessary
+ if target_ob.add_install_deps:
+ ezt_target.install = target_ob.install
+ ezt_target.install_deps = target_ob.add_install_deps
+
+ if isinstance(target_ob, gen_base.TargetJava):
+ ezt_target.type = 'java'
+ ezt_target.headers = headers
+ ezt_target.sources = None
+ ezt_target.jar = None
+ ezt_target.classes = target_ob.classes
+
+ # Build the headers from the header_classes with one 'javah' call
+ if headers:
+ ezt_target.header_class_filenames = header_class_filenames
+ ezt_target.header_classes = header_classes
+
+ # Build the objects from the object_srcs with one 'javac' call
+ if object_srcs:
+ ezt_target.sources = object_srcs
+
+ # Once the bytecodes have been compiled up, we produce the
+ # JAR.
+ if target_ob.jar:
+ ezt_target.jar_path = build_path_join(target_ob.classes,
+ target_ob.jar)
+ ezt_target.packages = target_ob.packages
+
+ elif isinstance(target_ob, gen_base.TargetI18N):
+ ezt_target.type = 'i18n'
+ else:
+ ezt_target.type = 'n/a'
+ ezt_target.filename = target_ob.filename
+ ezt_target.path = path
+ if (isinstance(target_ob, gen_base.TargetLib)
+ and not target_ob.undefined_lib_symbols):
+ ezt_target.undefined_flag = '$(LT_NO_UNDEFINED)'
+ else:
+ ezt_target.undefined_flag = ''
+ ezt_target.libs = gen_base.unique(libs)
+ ezt_target.objnames = objnames
+ ezt_target.basename = build_path_basename(target_ob.filename)
+
+ ########################################
+
+ for itype, i_targets in install_deps:
+
+ # perl bindings do their own thing, "swig-pl" target is
+ # already specified in Makefile.in
+ if itype == "swig-pl":
+ continue
+
+ outputs = [ ]
+
+ for t in i_targets:
+ if hasattr(t, 'filename'):
+ outputs.append(t.filename)
+
+ data.itargets.append(_eztdata(type=itype, outputs=outputs))
+
+ ########################################
+
+ # for each install group, write a rule to install its outputs
+ for area, inst_targets in install_deps:
+
+ # perl bindings do their own thing, "install-swig-pl" target is
+ # already specified in Makefile.in
+ if area == "swig-pl":
+ continue
+
+ # get the output files for these targets, sorted in dependency order
+ files = gen_base._sorted_files(self.graph, area)
+
+ ezt_area = _eztdata(type=area, files=[ ], apache_files=[ ],
+ extra_install=None)
+
+ def apache_file_to_eztdata(file):
+ # cd to dirname before install to work around libtool 1.4.2 bug.
+ dirname, fname = build_path_splitfile(file)
+ base, ext = os.path.splitext(fname)
+ name = base.replace('mod_', '')
+ return _eztdata(fullname=file, dirname=dirname,
+ name=name, filename=fname)
+ if area == 'apache-mod':
+ data.areas.append(ezt_area)
+
+ for file in files:
+ ezt_area.files.append(apache_file_to_eztdata(file))
+
+ elif area != 'test' and area != 'bdb-test':
+ data.areas.append(ezt_area)
+
+ area_var = area.replace('-', '_')
+ upper_var = area_var.upper()
+ ezt_area.varname = area_var
+ ezt_area.uppervar = upper_var
+
+ # ### TODO: This is a hack. See discussion here:
+ # ### http://mid.gmane.org/20120316191639.GA28451@daniel3.local
+ apache_files = [t.filename for t in inst_targets
+ if isinstance(t, gen_base.TargetApacheMod)]
+
+ files = [f for f in files if f not in apache_files]
+ for file in apache_files:
+ ezt_area.apache_files.append(apache_file_to_eztdata(file))
+ for file in files:
+ # cd to dirname before install to work around libtool 1.4.2 bug.
+ dirname, fname = build_path_splitfile(file)
+ ezt_file = _eztdata(dirname=dirname, fullname=file,
+ filename=fname)
+ if area == 'locale':
+ lang, objext = os.path.splitext(fname)
+ installdir = '$(DESTDIR)$(%sdir)/%s/LC_MESSAGES' % (area_var, lang)
+ ezt_file.installdir = installdir
+ ezt_file.objext = objext
+ else:
+ ezt_file.install_fname = build_path_join('$(%sdir)' % area_var,
+ fname)
+
+ ezt_area.files.append(ezt_file)
+
+ # certain areas require hooks for extra install rules defined
+ # in Makefile.in
+ ### we should turn AREA into an object, then test it instead of this
+ if area[:5] == 'swig-' and area[-4:] != '-lib' or \
+ area[:7] == 'javahl-':
+ ezt_area.extra_install = 'yes'
+
+ ########################################
+
+ includedir = build_path_join('$(includedir)',
+ 'subversion-%s' % self.version)
+ data.includes = [_eztdata(file=file,
+ src=build_path_join('$(abs_srcdir)', file),
+ dst=build_path_join(includedir,
+ build_path_basename(file)))
+ for file in self.includes]
+ data.includedir = includedir
+
+ ########################################
+
+ for target in install_sources:
+ if not isinstance(target, gen_base.TargetScript) and \
+ not isinstance(target, gen_base.TargetJava) and \
+ not isinstance(target, gen_base.TargetI18N):
+ data.isources.append(_eztdata(name=target.name,
+ filename=target.filename))
+
+ ########################################
+
+ # write dependencies and build rules (when not using suffix rules)
+ # for all other generated files which will not be installed
+ # (or will be installed, but not by the main generated build)
+ obj_deps = sorted(self.graph.get_deps(gen_base.DT_OBJECT),
+ key=lambda t: t[0].filename)
+
+ for objname, sources in obj_deps:
+ dep = _eztdata(name=str(objname),
+ deps=list(map(str, sources)),
+ cmd=objname.compile_cmd,
+ source=str(sources[0]))
+ data.deps.append(dep)
+ dep.generated = ezt.boolean(getattr(objname, 'source_generated', 0))
+
+ template = ezt.Template(os.path.join('build', 'generator', 'templates',
+ 'makefile.ezt'),
+ compress_whitespace=False)
+ template.generate(open('build-outputs.mk', 'w'), data)
+
+ self.write_standalone()
+
+ self.write_transform_libtool_scripts(install_sources)
+
+ def write_standalone(self):
+ """Write autogen-standalone.mk"""
+
+ standalone = open("autogen-standalone.mk", "w")
+ standalone.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n')
+ standalone.write('abs_srcdir = %s\n' % os.getcwd())
+ standalone.write('abs_builddir = %s\n' % os.getcwd())
+ standalone.write('top_srcdir = .\n')
+ standalone.write('top_builddir = .\n')
+ standalone.write('SWIG = swig\n')
+ standalone.write('PYTHON = python\n')
+ standalone.write('\n')
+ standalone.write(open("build-outputs.mk","r").read())
+ standalone.close()
+
+ def write_transform_libtool_scripts(self, install_sources):
+ """Write build/transform_libtool_scripts.sh"""
+ script = 'build/transform_libtool_scripts.sh'
+ fd = open(script, 'w')
+ fd.write('''#!/bin/sh
+# DO NOT EDIT -- AUTOMATICALLY GENERATED
+
+transform()
+{
+ SCRIPT="$1"
+ LIBS="$2"
+ if [ -f $SCRIPT ]; then
+ if grep LD_PRELOAD "$SCRIPT" > /dev/null; then
+ :
+ elif grep LD_LIBRARY_PATH "$SCRIPT" > /dev/null; then
+ echo "Transforming $SCRIPT"
+ EXISTINGLIBS=""
+ for LIB in $LIBS; do
+ # exclude libsvn_test since the undefined test_funcs breaks libtool
+ case $LIB in
+ *libsvn_test-*) continue ;;
+ esac
+ if [ ! -f $LIB ]; then
+ continue
+ fi
+ if [ -z "$EXISTINGLIBS" ]; then
+ EXISTINGLIBS="$LIB"
+ else
+ EXISTINGLIBS="$EXISTINGLIBS $LIB"
+ fi
+ done
+ if [ ! -z "$EXISTINGLIBS" ]; then
+ cat "$SCRIPT" |
+ (
+ read LINE
+ echo "$LINE"
+ read LINE
+ echo "$LINE"
+ read LINE
+ echo "$LINE"
+ read LINE
+ echo "$LINE"
+ echo "LD_PRELOAD=\\"$EXISTINGLIBS\\""
+ echo "export LD_PRELOAD"
+ cat
+ ) < "$SCRIPT" > "$SCRIPT.new"
+ mv -f "$SCRIPT.new" "$SCRIPT"
+ chmod +x "$SCRIPT"
+ fi
+ fi
+ fi
+}
+
+DIR=`pwd`
+
+''')
+ libdep_cache = {}
+ paths = {}
+ for lib in ('libsvn_auth_gnome_keyring', 'libsvn_auth_kwallet'):
+ paths[lib] = self.sections[lib].options.get('path')
+ for target_ob in install_sources:
+ if not isinstance(target_ob, gen_base.TargetExe):
+ continue
+ name = target_ob.name
+ libs = self._get_all_lib_deps(target_ob.name, libdep_cache, paths)
+ path = paths[name]
+ for i in range(0, len(libs)):
+ lib = libs[i]
+ libpath = paths[libs[i]]
+ libs[i] = '$DIR/%s/.libs/%s-%s.so' % (libpath, lib, self.version)
+ fd.write('transform %s/%s "%s"\n' % (path, name, " ".join(libs)))
+ fd.close()
+ mode = stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH
+ os.chmod(script, mode)
+
+ def _get_all_lib_deps(self, target_name, libdep_cache, paths):
+ if not target_name in libdep_cache:
+ libs = set()
+ path = None
+ if target_name in self.sections:
+ section = self.sections[target_name]
+ opt_libs = self.sections[target_name].options.get('libs')
+ paths[target_name] = section.options.get('path')
+ if opt_libs:
+ for lib_name in opt_libs.split():
+ if lib_name.startswith('libsvn_'):
+ libs.add(lib_name)
+ for lib in self._get_all_lib_deps(lib_name, libdep_cache, paths):
+ libs.add(lib)
+ if target_name == 'libsvn_subr':
+ libs.update(('libsvn_auth_gnome_keyring', 'libsvn_auth_kwallet'))
+ libdep_cache[target_name] = sorted(libs)
+ return libdep_cache[target_name]
+
+class UnknownDependency(Exception):
+ "We don't know how to deal with the dependent to link it in."
+ pass
+
+### End of file.
diff --git a/build/generator/gen_msvc_dsp.py b/build/generator/gen_msvc_dsp.py
new file mode 100644
index 0000000..85bbe65
--- /dev/null
+++ b/build/generator/gen_msvc_dsp.py
@@ -0,0 +1,173 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# gen_dsp.py -- generate Microsoft Visual C++ 6 projects
+#
+
+import os
+import sys
+
+import gen_base
+import gen_win
+import ezt
+
+
+class Generator(gen_win.WinGeneratorBase):
+ "Generate a Microsoft Visual C++ 6 project"
+
+ def __init__(self, fname, verfname, options):
+ gen_win.WinGeneratorBase.__init__(self, fname, verfname, options,
+ 'msvc-dsp')
+
+ def quote(self, str):
+ return '"%s"' % str
+
+ def write_project(self, target, fname):
+ "Write a Project (.dsp)"
+
+ if isinstance(target, gen_base.TargetExe):
+ targtype = "Win32 (x86) Console Application"
+ targval = "0x0103"
+ elif isinstance(target, gen_base.TargetJava):
+ targtype = "Win32 (x86) Generic Project"
+ targval = "0x010a"
+ elif isinstance(target, gen_base.TargetLib):
+ if target.msvc_static:
+ targtype = "Win32 (x86) Static Library"
+ targval = "0x0104"
+ else:
+ targtype = "Win32 (x86) Dynamic-Link Library"
+ targval = "0x0102"
+ elif isinstance(target, gen_base.TargetProject):
+ if target.cmd:
+ targtype = "Win32 (x86) External Target"
+ targval = "0x0106"
+ else:
+ targtype = "Win32 (x86) Generic Project"
+ targval = "0x010a"
+ else:
+ raise gen_base.GenError("Cannot create project for %s" % target.name)
+
+ target.output_name = self.get_output_name(target)
+ target.output_dir = self.get_output_dir(target)
+ target.intermediate_dir = self.get_intermediate_dir(target)
+ target.output_pdb = self.get_output_pdb(target)
+
+ configs = self.get_configs(target)
+
+ sources = self.get_proj_sources(True, target)
+
+ data = {
+ 'target' : target,
+ 'target_type' : targtype,
+ 'target_number' : targval,
+ 'rootpath' : self.rootpath,
+ 'platforms' : self.platforms,
+ 'configs' : configs,
+ 'includes' : self.get_win_includes(target),
+ 'sources' : sources,
+ 'default_platform' : self.platforms[0],
+ 'default_config' : configs[0].name,
+ 'is_exe' : ezt.boolean(isinstance(target, gen_base.TargetExe)),
+ 'is_external' : ezt.boolean((isinstance(target, gen_base.TargetProject)
+ or isinstance(target, gen_base.TargetI18N))
+ and target.cmd),
+ 'is_utility' : ezt.boolean(isinstance(target,
+ gen_base.TargetProject)),
+ 'is_dll' : ezt.boolean(isinstance(target, gen_base.TargetLib)
+ and not target.msvc_static),
+ 'instrument_apr_pools' : self.instrument_apr_pools,
+ 'instrument_purify_quantify' : self.instrument_purify_quantify,
+ }
+
+ self.write_with_template(fname, 'templates/msvc_dsp.ezt', data)
+
+ def write(self):
+ "Write a Workspace (.dsw)"
+
+ # Gather sql targets for inclusion in svn_config project.
+ class _eztdata(object):
+ def __init__(self, **kw):
+ vars(self).update(kw)
+
+ import sys
+ sql=[]
+ for hdrfile, sqlfile in sorted(self.graph.get_deps(gen_base.DT_SQLHDR),
+ key=lambda t: t[0]):
+ sql.append(_eztdata(header=hdrfile.replace('/', '\\'),
+ source=sqlfile[0].replace('/', '\\'),
+ svn_python=sys.executable))
+
+ self.move_proj_file(self.projfilesdir,
+ 'svn_config.dsp',
+ (
+ ('sql', sql),
+ ('project_guid', self.makeguid('__CONFIG__')),
+ )
+ )
+ self.move_proj_file(self.projfilesdir,
+ 'svn_locale.dsp',
+ (
+ ('project_guid', self.makeguid('svn_locale')),
+ ))
+ self.write_zlib_project_file('zlib.dsp')
+ self.write_neon_project_file('neon.dsp')
+ self.write_serf_project_file('serf.dsp')
+ install_targets = self.get_install_targets()
+
+ targets = [ ]
+
+ self.gen_proj_names(install_targets)
+
+ # Traverse the targets and generate the project files
+ for target in install_targets:
+ name = target.name
+ fname = self.get_external_project(target, 'dsp')
+ if fname is None:
+ fname = os.path.join(self.projfilesdir,
+ "%s_msvc.dsp" % target.proj_name)
+ self.write_project(target, fname)
+
+ if '-' in fname:
+ fname = '"%s"' % fname
+
+ depends = [ ]
+ if not isinstance(target, gen_base.TargetI18N):
+ depends = self.adjust_win_depends(target, name)
+ #print name
+ #for dep in depends:
+ # print " ",dep.name
+
+ dep_names = [ ]
+ for dep in depends:
+ dep_names.append(dep.proj_name)
+
+ targets.append(
+ gen_win.ProjectItem(name=target.proj_name,
+ dsp=fname.replace(os.sep, '\\'),
+ depends=dep_names))
+
+ targets.sort(key = lambda x: x.name)
+ data = {
+ 'targets' : targets,
+ }
+
+ self.write_with_template('subversion_msvc.dsw', 'templates/msvc_dsw.ezt', data)
diff --git a/build/generator/gen_vcnet_vcproj.py b/build/generator/gen_vcnet_vcproj.py
new file mode 100644
index 0000000..0e9ce15
--- /dev/null
+++ b/build/generator/gen_vcnet_vcproj.py
@@ -0,0 +1,280 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# gen_vcnet_vcproj.py -- generate Microsoft Visual C++.NET projects
+#
+
+import os
+import gen_base
+import gen_win
+import ezt
+
+
+class Generator(gen_win.WinGeneratorBase):
+ "Generate a Visual C++.NET project"
+
+ def __init__(self, fname, verfname, options):
+ gen_win.WinGeneratorBase.__init__(self, fname, verfname, options,
+ 'vcnet-vcproj')
+
+ def quote(self, str):
+ return '"%s"' % str
+
+ def get_external_project(self, target, proj_ext):
+ "Link project files: prefer vcproj's, but if don't exist, try dsp's."
+ vcproj = gen_win.WinGeneratorBase.get_external_project(self, target,
+ proj_ext)
+ if vcproj and not os.path.exists(vcproj):
+ dspproj = gen_win.WinGeneratorBase.get_external_project(self, target,
+ 'dsp')
+ if os.path.exists(dspproj):
+ return dspproj
+
+ return vcproj
+
+ def write_project(self, target, fname, depends):
+ "Write a Project (.vcproj/.vcxproj)"
+
+ if isinstance(target, gen_base.TargetProject):
+ config_type='Utility'
+ target_type=10
+ elif isinstance(target, gen_base.TargetExe):
+ config_type='Application'
+ target_type=1
+ elif isinstance(target, gen_base.TargetJava):
+ config_type='Utility'
+ target_type=10
+ elif isinstance(target, gen_base.TargetLib):
+ if target.msvc_static:
+ config_type='StaticLibrary'
+ target_type=4
+ else:
+ config_type='DynamicLibrary'
+ target_type=2
+ elif isinstance(target, gen_base.TargetI18N):
+ config_type='Makefile'
+ target_type=4
+ else:
+ raise gen_base.GenError("Cannot create project for %s" % target.name)
+
+ target.output_name = self.get_output_name(target)
+ target.output_pdb = self.get_output_pdb(target)
+ target.output_dir = self.get_output_dir(target)
+ target.intermediate_dir = self.get_intermediate_dir(target)
+ basename = os.path.basename(target.output_name)
+ target.output_ext = basename[basename.rfind('.'):]
+ target.output_name_without_ext = basename[:basename.rfind('.')]
+
+ configs = self.get_configs(target)
+
+ sources = self.get_proj_sources(False, target)
+
+ if self.vcproj_extension == '.vcxproj':
+ for src in sources:
+ if src.custom_build is not None:
+ src.custom_build = src.custom_build.replace('$(InputPath)', '%(FullPath)')
+
+ data = {
+ 'target' : target,
+ 'target_type' : target_type,
+ 'project_guid' : target.project_guid,
+ 'rootpath' : self.rootpath,
+ 'platforms' : self.platforms,
+ 'config_type' : config_type,
+ 'configs' : configs,
+ 'includes' : self.get_win_includes(target),
+ 'sources' : sources,
+ 'default_platform' : self.platforms[0],
+ 'default_config' : configs[0].name,
+ 'def_file' : self.get_def_file(target),
+ 'depends' : depends,
+ 'is_exe' : ezt.boolean(isinstance(target, gen_base.TargetExe)),
+ 'is_external' : ezt.boolean((isinstance(target, gen_base.TargetProject)
+ or isinstance(target, gen_base.TargetI18N))
+ and target.cmd),
+ 'is_utility' : ezt.boolean(isinstance(target,
+ gen_base.TargetProject)),
+ 'instrument_apr_pools' : self.instrument_apr_pools,
+ 'instrument_purify_quantify' : self.instrument_purify_quantify,
+ 'version' : self.vcproj_version,
+ }
+
+ if self.vcproj_extension == '.vcproj':
+ self.write_with_template(fname, 'templates/vcnet_vcproj.ezt', data)
+ else:
+ self.write_with_template(fname, 'templates/vcnet_vcxproj.ezt', data)
+ self.write_with_template(fname + '.filters', 'templates/vcnet_vcxproj_filters.ezt', data)
+
+ def find_rootpath(self):
+ "Gets the root path as understand by the project system"
+ return "$(SolutionDir)"
+
+ def write(self):
+ "Write a Solution (.sln)"
+
+ # Gather sql targets for inclusion in svn_config project.
+ class _eztdata(object):
+ def __init__(self, **kw):
+ vars(self).update(kw)
+
+ import sys
+ sql=[]
+ for hdrfile, sqlfile in sorted(self.graph.get_deps(gen_base.DT_SQLHDR),
+ key=lambda t: t[0]):
+ sql.append(_eztdata(header=hdrfile.replace('/', '\\'),
+ source=sqlfile[0].replace('/', '\\'),
+ dependencies=[x.replace('/', '\\') for x in sqlfile[1:]],
+ svn_python=sys.executable))
+
+    # apr doesn't supply vcproj files; the user must convert them
+ # manually before loading the generated solution
+ self.move_proj_file(self.projfilesdir,
+ 'svn_config' + self.vcproj_extension,
+ (
+ ('sql', sql),
+ ('project_guid', self.makeguid('__CONFIG__')),
+ )
+ )
+ self.move_proj_file(self.projfilesdir,
+ 'svn_locale' + self.vcproj_extension,
+ (
+ ('project_guid', self.makeguid('svn_locale')),
+ ))
+ self.write_zlib_project_file('zlib' + self.vcproj_extension)
+ self.write_neon_project_file('neon' + self.vcproj_extension)
+ self.write_serf_project_file('serf' + self.vcproj_extension)
+
+ install_targets = self.get_install_targets()
+
+ targets = [ ]
+
+ guids = { }
+
+ # Visual Studio uses GUIDs to refer to projects. Get them up front
+ # because we need them already assigned on the dependencies for
+ # each target we work with.
+ for target in install_targets:
+ # If there is a GUID in an external project, then use it
+ # rather than generating our own that won't match and will
+ # cause dependency failures.
+ proj_path = self.get_external_project(target, self.vcproj_extension[1:])
+ if proj_path is not None:
+ target.project_guid = self.makeguid(target.name)
+ guids[target.name] = target.project_guid
+
+ self.gen_proj_names(install_targets)
+
+ for target in install_targets:
+ fname = self.get_external_project(target, self.vcproj_extension[1:])
+ if fname is None:
+ fname = os.path.join(self.projfilesdir, "%s%s" %
+ (target.proj_name, self.vcproj_extension))
+ target.fname = fname
+
+ # Traverse the targets and generate the project files
+ for target in install_targets:
+ name = target.name
+
+ depends = [ ]
+ if not isinstance(target, gen_base.TargetI18N):
+ depends = self.adjust_win_depends(target, name)
+
+ deplist = [ ]
+ for i in range(len(depends)):
+ if depends[i].fname.startswith(self.projfilesdir):
+ path = depends[i].fname[len(self.projfilesdir) + 1:]
+ else:
+ path = '$(SolutionDir)' + depends[i].fname
+ deplist.append(gen_win.ProjectItem(guid=guids[depends[i].name],
+ index=i,
+ path=path,
+ ))
+
+ fname = self.get_external_project(target, self.vcproj_extension[1:])
+ if fname is None:
+ fname = target.fname
+ self.write_project(target, fname, deplist)
+
+ groupname = ''
+
+ if target.name.startswith('__'):
+ groupname = 'root'
+ elif isinstance(target, gen_base.TargetLib):
+ if isinstance(target, gen_base.TargetSWIGLib) \
+ or isinstance(target, gen_base.TargetSWIG):
+ groupname = 'swiglib'
+ elif target.msvc_fake:
+ groupname = 'fake'
+ elif target.msvc_export and not self.disable_shared:
+ groupname = 'dll'
+ else:
+ groupname = 'lib'
+ elif isinstance(target, gen_base.TargetSWIGProject):
+ groupname = 'swiglib'
+ elif isinstance(target, gen_base.TargetJava):
+ # Keep the buildbot happy
+ groupname = 'root'
+ # groupname = 'java'
+ elif isinstance(target, gen_base.TargetExe):
+ if target.name.endswith('-test') \
+ or target.name.endswith('-tests'):
+ groupname = 'test'
+ else:
+ groupname = 'exe'
+
+ targets.append(
+ gen_win.ProjectItem(name=target.name,
+ path=fname.replace(os.sep, '\\'),
+ guid=guids[target.name],
+ depends=deplist,
+ group=groupname,
+ ))
+
+ targets.sort(key = lambda x: x.name)
+
+ configs = [ ]
+ for i in range(len(self.configs)):
+ ### this is different from write_project
+ configs.append(gen_win.ProjectItem(name=self.configs[i], index=i))
+
+ # sort the values for output stability.
+ guidvals = sorted(guids.values())
+
+ # Before VS2010 dependencies are managed at the solution level
+ if self.vcproj_extension == '.vcproj':
+ dependency_location = 'solution'
+ else:
+ dependency_location = 'project'
+
+ data = {
+ 'version': self.sln_version,
+ 'vs_version' : self.vs_version,
+ 'dependency_location' : dependency_location,
+ 'targets' : targets,
+ 'configs' : configs,
+ 'platforms' : self.platforms,
+ 'guids' : guidvals,
+ }
+
+ if self.vs_version == '2002' or self.vs_version == '2003':
+ self.write_with_template('subversion_vcnet.sln', 'templates/vcnet_vc7_sln.ezt', data)
+ else:
+ self.write_with_template('subversion_vcnet.sln', 'templates/vcnet_sln.ezt', data)
diff --git a/build/generator/gen_win.py b/build/generator/gen_win.py
new file mode 100644
index 0000000..7654d4d
--- /dev/null
+++ b/build/generator/gen_win.py
@@ -0,0 +1,1684 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# gen_win.py -- base class for generating windows projects
+#
+
+import os
+try:
+ # Python >=2.5
+ from hashlib import md5 as hashlib_md5
+except ImportError:
+ # Python <2.5
+ from md5 import md5 as hashlib_md5
+import sys
+import fnmatch
+import re
+import subprocess
+import glob
+import string
+import generator.swig.header_wrappers
+import generator.swig.checkout_swig_header
+import generator.swig.external_runtime
+
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ from io import StringIO
+else:
+ # Python <3.0
+ try:
+ from cStringIO import StringIO
+ except ImportError:
+ from StringIO import StringIO
+
+import gen_base
+import ezt
+
+
+class GeneratorBase(gen_base.GeneratorBase):
+ """This intermediate base class exists to be instantiated by win-tests.py,
+ in order to obtain information from build.conf and library paths without
+ actually doing any generation."""
+ _extension_map = {
+ ('exe', 'target'): '.exe',
+ ('exe', 'object'): '.obj',
+ ('lib', 'target'): '.dll',
+ ('lib', 'object'): '.obj',
+ ('pyd', 'target'): '.pyd',
+ ('pyd', 'object'): '.obj',
+ }
+
+ def parse_options(self, options):
+ self.apr_path = 'apr'
+ self.apr_util_path = 'apr-util'
+ self.apr_iconv_path = 'apr-iconv'
+ self.serf_path = None
+ self.serf_lib = None
+ self.bdb_path = 'db4-win32'
+ self.without_neon = False
+ self.neon_path = 'neon'
+ self.neon_ver = 25005
+ self.httpd_path = None
+ self.libintl_path = None
+ self.zlib_path = 'zlib'
+ self.openssl_path = None
+ self.jdk_path = None
+ self.junit_path = None
+ self.swig_path = None
+ self.vs_version = '2002'
+ self.sln_version = '7.00'
+ self.vcproj_version = '7.00'
+ self.vcproj_extension = '.vcproj'
+ self.sqlite_path = 'sqlite-amalgamation'
+ self.skip_sections = { 'mod_dav_svn': None,
+ 'mod_authz_svn': None,
+ 'mod_dontdothat' : None,
+ 'libsvn_auth_kwallet': None,
+ 'libsvn_auth_gnome_keyring': None }
+
+ # Instrumentation options
+ self.disable_shared = None
+ self.static_apr = None
+ self.instrument_apr_pools = None
+ self.instrument_purify_quantify = None
+ self.configure_apr_util = None
+ self.sasl_path = None
+
+ # NLS options
+ self.enable_nls = None
+
+ # ML (assembler) is disabled by default; use --enable-ml to detect
+ self.enable_ml = None
+
+ for opt, val in options:
+ if opt == '--with-berkeley-db':
+ self.bdb_path = val
+ elif opt == '--with-apr':
+ self.apr_path = val
+ elif opt == '--with-apr-util':
+ self.apr_util_path = val
+ elif opt == '--with-apr-iconv':
+ self.apr_iconv_path = val
+ elif opt == '--with-serf':
+ self.serf_path = val
+ elif opt == '--with-neon':
+ self.neon_path = val
+ elif opt == '--without-neon':
+ self.without_neon = True
+ elif opt == '--with-httpd':
+ self.httpd_path = val
+ del self.skip_sections['mod_dav_svn']
+ del self.skip_sections['mod_authz_svn']
+ del self.skip_sections['mod_dontdothat']
+ elif opt == '--with-libintl':
+ self.libintl_path = val
+ self.enable_nls = 1
+ elif opt == '--with-jdk':
+ self.jdk_path = val
+ elif opt == '--with-junit':
+ self.junit_path = val
+ elif opt == '--with-zlib':
+ self.zlib_path = val
+ elif opt == '--with-swig':
+ self.swig_path = val
+ elif opt == '--with-sqlite':
+ self.sqlite_path = val
+ elif opt == '--with-sasl':
+ self.sasl_path = val
+ elif opt == '--with-openssl':
+ self.openssl_path = val
+ elif opt == '--enable-purify':
+ self.instrument_purify_quantify = 1
+ self.instrument_apr_pools = 1
+ elif opt == '--enable-quantify':
+ self.instrument_purify_quantify = 1
+ elif opt == '--enable-pool-debug':
+ self.instrument_apr_pools = 1
+ elif opt == '--enable-nls':
+ self.enable_nls = 1
+ elif opt == '--enable-bdb-in-apr-util':
+ self.configure_apr_util = 1
+ elif opt == '--enable-ml':
+ self.enable_ml = 1
+ elif opt == '--disable-shared':
+ self.disable_shared = 1
+ elif opt == '--with-static-apr':
+ self.static_apr = 1
+ elif opt == '--vsnet-version':
+ if val == '2002' or re.match('7(\.\d+)?', val):
+ self.vs_version = '2002'
+ self.sln_version = '7.00'
+ self.vcproj_version = '7.00'
+ self.vcproj_extension = '.vcproj'
+ elif val == '2003' or re.match('8(\.\d+)?', val):
+ self.vs_version = '2003'
+ self.sln_version = '8.00'
+ self.vcproj_version = '7.10'
+ self.vcproj_extension = '.vcproj'
+ elif val == '2005' or re.match('9(\.\d+)?', val):
+ self.vs_version = '2005'
+ self.sln_version = '9.00'
+ self.vcproj_version = '8.00'
+ self.vcproj_extension = '.vcproj'
+ elif val == '2008' or re.match('10(\.\d+)?', val):
+ self.vs_version = '2008'
+ self.sln_version = '10.00'
+ self.vcproj_version = '9.00'
+ self.vcproj_extension = '.vcproj'
+ elif val == '2010':
+ self.vs_version = '2010'
+ self.sln_version = '11.00'
+ self.vcproj_version = '10.0'
+ self.vcproj_extension = '.vcxproj'
+ else:
+ print('WARNING: Unknown VS.NET version "%s",'
+ ' assuming "%s"\n' % (val, '7.00'))
+
+
+ def __init__(self, fname, verfname, options):
+
+ # parse (and save) the options that were passed to us
+ self.parse_options(options)
+
+ # Initialize parent
+ gen_base.GeneratorBase.__init__(self, fname, verfname, options)
+
+ # Find Berkeley DB
+ self._find_bdb()
+
+ def _find_bdb(self):
+ "Find the Berkeley DB library and version"
+ for ver in ("48", "47", "46", "45", "44", "43", "42", "41", "40"):
+ lib = "libdb" + ver
+ path = os.path.join(self.bdb_path, "lib")
+ if os.path.exists(os.path.join(path, lib + ".lib")):
+ self.bdb_lib = lib
+ break
+ else:
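+      # The for-loop's else clause runs only when no libdb<ver>.lib was found.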
+ self.bdb_lib = None
+
+class WinGeneratorBase(GeneratorBase):
+ "Base class for all Windows project files generators"
+
+ def __init__(self, fname, verfname, options, subdir):
+ """
+    Do some Windows-specific setup
+
+ Build the list of Platforms & Configurations &
+ create the necessary paths
+ """
+
+ # Initialize parent
+ GeneratorBase.__init__(self, fname, verfname, options)
+
+ if self.bdb_lib is not None:
+ print("Found %s.lib in %s\n" % (self.bdb_lib, self.bdb_path))
+ else:
+ print("BDB not found, BDB fs will not be built\n")
+
+ if subdir == 'vcnet-vcproj':
+ print('Generating for Visual Studio %s\n' % self.vs_version)
+
+ # Find the right Ruby include and libraries dirs and
+ # library name to link SWIG bindings with
+ self._find_ruby()
+
+ # Find the right Perl library name to link SWIG bindings with
+ self._find_perl()
+
+ # Find the right Python include and libraries dirs for SWIG bindings
+ self._find_python()
+
+ # Find the installed SWIG version to adjust swig options
+ self._find_swig()
+
+ # Find the installed Java Development Kit
+ self._find_jdk()
+
+ # Find APR and APR-util version
+ self._find_apr()
+ self._find_apr_util()
+
+ # Find Sqlite
+ self._find_sqlite()
+
+ # Look for ZLib and ML
+ if self.zlib_path:
+ self._find_zlib()
+ self._find_ml()
+
+ # Find neon version
+ if self.neon_path:
+ self._find_neon()
+
+ # Find serf and its dependencies
+ if self.serf_path:
+ self._find_serf()
+
+ #Make some files for the installer so that we don't need to
+ #require sed or some other command to do it
+ ### GJS: don't do this right now
+ if 0:
+ buf = open(os.path.join("packages","win32-innosetup","svn.iss.in"), 'rb').read()
+ buf = buf.replace("@VERSION@", "0.16.1+").replace("@RELEASE@", "4365")
+ buf = buf.replace("@DBBINDLL@", self.dbbindll)
+ svnissrel = os.path.join("packages","win32-innosetup","svn.iss.release")
+ svnissdeb = os.path.join("packages","win32-innosetup","svn.iss.debug")
+ if self.write_file_if_changed(svnissrel, buf.replace("@CONFIG@", "Release")):
+ print('Wrote %s' % svnissrel)
+ if self.write_file_if_changed(svnissdeb, buf.replace("@CONFIG@", "Debug")):
+ print('Wrote %s' % svnissdeb)
+
+ #Make the project files directory if it doesn't exist
+ #TODO win32 might not be the best path as win64 stuff will go here too
+ self.projfilesdir=os.path.join("build","win32",subdir)
+ self.rootpath = self.find_rootpath()
+ if not os.path.exists(self.projfilesdir):
+ os.makedirs(self.projfilesdir)
+
+ # Generate the build_zlib.bat file
+ if self.zlib_path:
+ data = {'zlib_path': os.path.abspath(self.zlib_path),
+ 'zlib_version': self.zlib_version,
+ 'use_ml': self.have_ml and 1 or None}
+ bat = os.path.join(self.projfilesdir, 'build_zlib.bat')
+ self.write_with_template(bat, 'templates/build_zlib.ezt', data)
+
+ # Generate the build_locale.bat file
+ pofiles = []
+ if self.enable_nls:
+ for po in os.listdir(os.path.join('subversion', 'po')):
+ if fnmatch.fnmatch(po, '*.po'):
+ pofiles.append(POFile(po[:-3]))
+
+ data = {'pofiles': pofiles}
+ self.write_with_template(os.path.join(self.projfilesdir,
+ 'build_locale.bat'),
+ 'templates/build_locale.ezt', data)
+
+ #Here we can add additional platforms to compile for
+ self.platforms = ['Win32']
+
+ # VC 2002 and VC 2003 only allow a single platform per project file
+ if subdir == 'vcnet-vcproj':
+ if self.vcproj_version != '7.00' and self.vcproj_version != '7.10':
+ self.platforms = ['Win32','x64']
+
+ #Here we can add additional modes to compile for
+ self.configs = ['Debug','Release']
+
+ if self.swig_libdir:
+ # Generate SWIG header wrappers and external runtime
+ for swig in (generator.swig.header_wrappers,
+ generator.swig.checkout_swig_header,
+ generator.swig.external_runtime):
+ swig.Generator(self.conf, self.swig_exe).write()
+ else:
+ print("%s not found; skipping SWIG file generation..." % self.swig_exe)
+
+ def find_rootpath(self):
+ "Gets the root path as understand by the project system"
+ return ".." + "\\.." * self.projfilesdir.count(os.sep) + "\\"
+
+ def makeguid(self, data):
+ "Generate a windows style GUID"
+ ### blah. this function can generate invalid GUIDs. leave it for now,
+ ### but we need to fix it. we can wrap the apr UUID functions, or
+ ### implement this from scratch using the algorithms described in
+ ### http://www.webdav.org/specs/draft-leach-uuids-guids-01.txt
+
+ myhash = hashlib_md5(data).hexdigest()
+
+ guid = ("{%s-%s-%s-%s-%s}" % (myhash[0:8], myhash[8:12],
+ myhash[12:16], myhash[16:20],
+ myhash[20:32])).upper()
+ return guid
+
+ def path(self, *paths):
+ """Convert build path to msvc path and prepend root"""
+ return self.rootpath + msvc_path_join(*list(map(msvc_path, paths)))
+
+ def apath(self, path, *paths):
+ """Convert build path to msvc path and prepend root if not absolute"""
+ ### On Unix, os.path.isabs won't do the right thing if "item"
+ ### contains backslashes or drive letters
+ if os.path.isabs(path):
+ return msvc_path_join(msvc_path(path), *list(map(msvc_path, paths)))
+ else:
+ return self.rootpath + msvc_path_join(msvc_path(path),
+ *list(map(msvc_path, paths)))
+
+ def get_install_targets(self):
+ "Generate the list of targets"
+
+ # Get list of targets to generate project files for
+ install_targets = self.graph.get_all_sources(gen_base.DT_INSTALL) \
+ + self.projects
+
+ # Don't create projects for scripts
+ install_targets = [x for x in install_targets if not isinstance(x, gen_base.TargetScript)]
+
+ # Drop the libsvn_fs_base target and tests if we don't have BDB
+ if not self.bdb_lib:
+ install_targets = [x for x in install_targets if x.name != 'libsvn_fs_base']
+ install_targets = [x for x in install_targets if not (isinstance(x, gen_base.TargetExe)
+ and x.install == 'bdb-test')]
+
+ # Drop the serf target if we don't have both serf and openssl
+ if not self.serf_lib:
+ install_targets = [x for x in install_targets if x.name != 'serf']
+ install_targets = [x for x in install_targets if x.name != 'libsvn_ra_serf']
+ if self.without_neon:
+ install_targets = [x for x in install_targets if x.name != 'neon']
+ install_targets = [x for x in install_targets if x.name != 'libsvn_ra_neon']
+
+ # Drop the swig targets if we don't have swig
+ if not self.swig_path and not self.swig_libdir:
+ install_targets = [x for x in install_targets
+ if not (isinstance(x, gen_base.TargetSWIG)
+ or isinstance(x, gen_base.TargetSWIGLib)
+ or isinstance(x, gen_base.TargetSWIGProject))]
+
+ dll_targets = []
+ for target in install_targets:
+ if isinstance(target, gen_base.TargetLib):
+ if target.msvc_fake:
+ install_targets.append(self.create_fake_target(target))
+ if target.msvc_export:
+ if self.disable_shared:
+ target.msvc_static = True
+ else:
+ dll_targets.append(self.create_dll_target(target))
+ install_targets.extend(dll_targets)
+
+ for target in install_targets:
+ target.project_guid = self.makeguid(target.name)
+
+ # sort these for output stability, to watch out for regressions.
+ install_targets.sort(key = lambda t: t.name)
+ return install_targets
+
+ def create_fake_target(self, dep):
+ "Return a new target which depends on another target but builds nothing"
+ section = gen_base.TargetProject.Section(gen_base.TargetProject,
+ dep.name + "_fake",
+ {'path': 'build/win32'}, self)
+ section.create_targets()
+ section.target.msvc_name = dep.msvc_name and dep.msvc_name + "_fake"
+ self.graph.add(gen_base.DT_LINK, section.target.name, dep)
+ dep.msvc_fake = section.target
+ return section.target
+
+ def create_dll_target(self, dep):
+ "Return a dynamic library that depends on a static library"
+ target = gen_base.TargetLib(dep.name,
+ { 'path' : dep.path,
+ 'msvc-name' : dep.name + "_dll" },
+ self)
+ target.msvc_export = dep.msvc_export
+
+ # move the description from the static library target to the dll.
+ target.desc = dep.desc
+ dep.desc = None
+
+ # The dependency should now be static.
+ dep.msvc_export = None
+ dep.msvc_static = True
+
+ # Remove the 'lib' prefix, so that the static library will be called
+ # svn_foo.lib
+ dep.name = dep.name[3:]
+ # However, its name should still be 'libsvn_foo' in Visual Studio
+ dep.msvc_name = target.name
+
+ # We renamed dep, so right now it has no dependencies. Because target has
+ # dep's old dependencies, transfer them over to dep.
+ deps = self.graph.deps[gen_base.DT_LINK]
+ deps[dep.name] = deps[target.name]
+
+ for key in deps.keys():
+ # Link everything except tests against the dll. Tests need to be linked
+ # against the static libraries because they sometimes access internal
+ # library functions.
+ if dep in deps[key] and key.find("test") == -1:
+ deps[key].remove(dep)
+ deps[key].append(target)
+
+ # The dll has exactly one dependency, the static library.
+ deps[target.name] = [ dep ]
+
+ return target
+
+ def get_configs(self, target):
+ "Get the list of configurations for the project"
+ configs = [ ]
+ for cfg in self.configs:
+ configs.append(
+ ProjectItem(name=cfg,
+ lower=cfg.lower(),
+ defines=self.get_win_defines(target, cfg),
+ libdirs=self.get_win_lib_dirs(target, cfg),
+ libs=self.get_win_libs(target, cfg),
+ ))
+ return configs
+
+ def get_proj_sources(self, quote_path, target):
+ "Get the list of source files for each project"
+ sources = [ ]
+
+ javac_exe = "javac"
+ javah_exe = "javah"
+ jar_exe = "jar"
+ if self.jdk_path:
+ javac_exe = os.path.join(self.jdk_path, "bin", javac_exe)
+ javah_exe = os.path.join(self.jdk_path, "bin", javah_exe)
+ jar_exe = os.path.join(self.jdk_path, "bin", jar_exe)
+
+ if not isinstance(target, gen_base.TargetProject):
+ for source, object, reldir in self.get_win_sources(target):
+ cbuild = None
+ ctarget = None
+ cdesc = None
+ if isinstance(target, gen_base.TargetJavaHeaders):
+ classes = self.path(target.classes)
+ if self.junit_path is not None:
+ classes = "%s;%s" % (classes, self.junit_path)
+
+ headers = self.path(target.headers)
+ classname = target.package + "." + source.class_name
+
+ cbuild = "%s -verbose -force -classpath %s -d %s %s" \
+ % (self.quote(javah_exe), self.quote(classes),
+ self.quote(headers), classname)
+
+ ctarget = self.path(object.filename_win)
+ cdesc = "Generating %s" % (object.filename_win)
+
+ elif isinstance(target, gen_base.TargetJavaClasses):
+ classes = targetdir = self.path(target.classes)
+ if self.junit_path is not None:
+ classes = "%s;%s" % (classes, self.junit_path)
+
+ sourcepath = self.path(source.sourcepath)
+
+ cbuild = "%s -g -target 1.5 -source 1.5 -classpath %s -d %s " \
+ "-sourcepath %s $(InputPath)" \
+ % tuple(map(self.quote, (javac_exe, classes,
+ targetdir, sourcepath)))
+
+ ctarget = self.path(object.filename)
+ cdesc = "Compiling %s" % (source)
+
+ rsrc = self.path(str(source))
+ if quote_path and '-' in rsrc:
+ rsrc = '"%s"' % rsrc
+
+ sources.append(ProjectItem(path=rsrc, reldir=reldir, user_deps=[],
+ custom_build=cbuild, custom_target=ctarget,
+ custom_desc=cdesc,
+ extension=os.path.splitext(rsrc)[1]))
+
+ if isinstance(target, gen_base.TargetJavaClasses) and target.jar:
+ classdir = self.path(target.classes)
+ jarfile = msvc_path_join(classdir, target.jar)
+ cbuild = "%s cf %s -C %s %s" \
+ % (self.quote(jar_exe), jarfile, classdir,
+ " ".join(target.packages))
+ deps = [x.custom_target for x in sources]
+ sources.append(ProjectItem(path='makejar', reldir='', user_deps=deps,
+ custom_build=cbuild, custom_target=jarfile,
+ extension=''))
+
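+    # For SWIG targets, add custom build steps that run SWIG on each
+    # interface file to generate the C wrapper sources.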
+ if isinstance(target, gen_base.TargetSWIG):
+ swig_options = self.swig.opts[target.lang].split()
+ swig_options.append('-DWIN32')
+ swig_deps = []
+
+ for include_dir in self.get_win_includes(target):
+ swig_options.append("-I%s" % self.quote(include_dir))
+
+ for obj in self.graph.get_sources(gen_base.DT_LINK, target.name):
+ if isinstance(obj, gen_base.SWIGObject):
+ for cobj in self.graph.get_sources(gen_base.DT_OBJECT, obj):
+ if isinstance(cobj, gen_base.SWIGObject):
+ csrc = self.path(cobj.filename)
+
+ cout = csrc
+
+ # included header files that the generated c file depends on
+ user_deps = swig_deps[:]
+
+ for iobj in self.graph.get_sources(gen_base.DT_SWIG_C, cobj):
+ isrc = self.path(str(iobj))
+
+ if not isinstance(iobj, gen_base.SWIGSource):
+ user_deps.append(isrc)
+ continue
+
+ cbuild = '%s %s -o %s $(InputPath)' \
+ % (self.swig_exe, " ".join(swig_options), cout)
+
+ cdesc = 'Generating %s' % cout
+
+ sources.append(ProjectItem(path=isrc, reldir=None,
+ custom_build=cbuild,
+ custom_target=csrc,
+ custom_desc=cdesc,
+ user_deps=user_deps,
+ extension=''))
+
+ def_file = self.get_def_file(target)
+ if def_file is not None:
+ gsrc = self.path("build/generator/extractor.py")
+
+ deps = [self.path('build.conf')]
+ for header in target.msvc_export:
+ deps.append(self.path('subversion/include', header))
+
+ cbuild = "%s $(InputPath) %s > %s" \
+ % (self.quote(sys.executable), " ".join(deps), def_file)
+
+ cdesc = 'Generating %s ' % def_file
+
+ sources.append(ProjectItem(path=gsrc, reldir=None,
+ custom_build=cbuild,
+ custom_target=def_file,
+ custom_desc=cdesc,
+ user_deps=deps,
+ extension=''))
+
+ sources.append(ProjectItem(path=def_file, reldir=None,
+ custom_build=None, user_deps=[],
+ extension=''))
+
+ sources.sort(key = lambda x: x.path)
+ return sources
+
+ def get_output_name(self, target):
+ if isinstance(target, gen_base.TargetExe):
+ return target.name + '.exe'
+ elif isinstance(target, gen_base.TargetJava):
+ ### This target file is not actually built, but we need it to keep
+ ### the VC Express build happy.
+ return target.name
+ elif isinstance(target, gen_base.TargetApacheMod):
+ return target.name + '.so'
+ elif isinstance(target, gen_base.TargetLib):
+ if target.msvc_static:
+ return '%s-%d.lib' % (target.name, self.version)
+ else:
+ return os.path.basename(target.filename)
+ elif isinstance(target, gen_base.TargetProject):
+ ### Since this target type doesn't produce any output, we shouldn't
+ ### need to specify an output filename. But to keep the VC.NET template
+ ### happy for now we have to return something
+ return target.name + '.exe'
+ elif isinstance(target, gen_base.TargetI18N):
+ return target.name
+
+ def get_output_pdb(self, target):
+ name = self.get_output_name(target)
+ name = os.path.splitext(name)
+ return name[0] + '.pdb'
+
+ def get_output_dir(self, target):
+ if isinstance(target, gen_base.TargetJavaHeaders):
+ return msvc_path("../" + target.headers)
+ elif isinstance(target, gen_base.TargetJavaClasses):
+ return msvc_path("../" + target.classes)
+ else:
+ return msvc_path(target.path)
+
+ def get_intermediate_dir(self, target):
+ if isinstance(target, gen_base.TargetSWIG):
+ return msvc_path_join(msvc_path(target.path), target.name)
+ else:
+ return self.get_output_dir(target)
+
+ def get_def_file(self, target):
+ if isinstance(target, gen_base.TargetLib) and target.msvc_export \
+ and not self.disable_shared:
+ return target.name + ".def"
+ return None
+
+ def gen_proj_names(self, install_targets):
+ "Generate project file names for the targets"
+ # Generate project file names for the targets: replace dashes with
+ # underscores and replace *-test with test_* (so that the test
+    # programs are visually separate from the rest of the projects)
+ for target in install_targets:
+ if target.msvc_name:
+ target.proj_name = target.msvc_name
+ continue
+
+ name = target.name
+ pos = name.find('-test')
+ if pos >= 0:
+ proj_name = 'test_' + name[:pos].replace('-', '_')
+ elif isinstance(target, gen_base.TargetSWIG):
+ proj_name = 'swig_' + name.replace('-', '_')
+ else:
+ proj_name = name.replace('-', '_')
+ target.proj_name = proj_name
+
+ def get_external_project(self, target, proj_ext):
+ if not ((isinstance(target, gen_base.TargetLinked)
+ or isinstance(target, gen_base.TargetI18N))
+ and target.external_project):
+ return None
+
+ if target.external_project[:5] == 'neon/':
+ path = self.neon_path + target.external_project[4:]
+ elif target.external_project[:5] == 'serf/' and self.serf_lib:
+ path = self.serf_path + target.external_project[4:]
+ elif target.external_project.find('/') != -1:
+ path = target.external_project
+ else:
+ path = os.path.join(self.projfilesdir, target.external_project)
+
+ return "%s.%s" % (gen_base.native_path(path), proj_ext)
+
+ def adjust_win_depends(self, target, name):
+ "Handle special dependencies if needed"
+
+ if name == '__CONFIG__':
+ depends = []
+ else:
+ depends = self.sections['__CONFIG__'].get_dep_targets(target)
+
+ depends.extend(self.get_win_depends(target, FILTER_PROJECTS))
+
+ # Make the default target generate the .mo files, too
+ if self.enable_nls and name == '__ALL__':
+ depends.extend(self.sections['locale'].get_targets())
+
+ # Build ZLib as a dependency of Neon or Serf if we have it
+ if self.zlib_path and (name == 'neon' or name == 'serf'):
+ depends.extend(self.sections['zlib'].get_targets())
+
+ # To set the correct build order of the JavaHL targets, the javahl-javah
+ # and libsvnjavahl targets are defined with extra dependencies in build.conf
+ # like this:
+ # add-deps = $(javahl_javah_DEPS) $(javahl_java_DEPS)
+ #
+ # This section parses those dependencies and adds them to the dependency list
+ # for this target.
+ if name.startswith('javahl') or name == 'libsvnjavahl':
+ for dep in re.findall('\$\(([^\)]*)_DEPS\)', target.add_deps):
+ dep = dep.replace('_', '-')
+ depends.extend(self.sections[dep].get_targets())
+
+ return depends
+
+ def get_win_depends(self, target, mode):
+ """Return the list of dependencies for target"""
+
+ dep_dict = {}
+
+ if isinstance(target, gen_base.TargetLib) and target.msvc_static:
+ self.get_static_win_depends(target, dep_dict)
+ else:
+ self.get_linked_win_depends(target, dep_dict)
+
+ deps = []
+
+ if mode == FILTER_PROJECTS:
+ for dep, (is_proj, is_lib, is_static) in dep_dict.items():
+ if is_proj:
+ deps.append(dep)
+ elif mode == FILTER_LIBS:
+ for dep, (is_proj, is_lib, is_static) in dep_dict.items():
+ if is_static or (is_lib and not is_proj):
+ deps.append(dep)
+ else:
+ raise NotImplementedError
+
+ deps.sort(key = lambda d: d.name)
+ return deps
+
+ def get_direct_depends(self, target):
+ """Read target dependencies from graph
+ return value is list of (dependency, (is_project, is_lib, is_static)) tuples
+ """
+ deps = []
+
+ for dep in self.graph.get_sources(gen_base.DT_LINK, target.name):
+ if not isinstance(dep, gen_base.Target):
+ continue
+
+ is_project = hasattr(dep, 'proj_name')
+ is_lib = isinstance(dep, gen_base.TargetLib)
+ is_static = is_lib and dep.msvc_static
+ deps.append((dep, (is_project, is_lib, is_static)))
+
+ for dep in self.graph.get_sources(gen_base.DT_NONLIB, target.name):
+ is_project = hasattr(dep, 'proj_name')
+ is_lib = isinstance(dep, gen_base.TargetLib)
+ is_static = is_lib and dep.msvc_static
+ deps.append((dep, (is_project, is_lib, is_static)))
+
+ return deps
+
+ def get_static_win_depends(self, target, deps):
+ """Find project dependencies for a static library project"""
+ for dep, dep_kind in self.get_direct_depends(target):
+ is_proj, is_lib, is_static = dep_kind
+
+ # recurse for projectless targets
+ if not is_proj:
+ self.get_static_win_depends(dep, deps)
+
+ # Only add project dependencies on non-library projects. If we added
+ # project dependencies on libraries, MSVC would copy those libraries
+ # into the static archive. This would waste space and lead to linker
+ # warnings about multiply defined symbols. Instead, the library
+ # dependencies get added to any DLLs or EXEs that depend on this static
+ # library (see get_linked_win_depends() implementation).
+ if not is_lib:
+ deps[dep] = dep_kind
+
+ # a static library can depend on another library through a fake project
+ elif dep.msvc_fake:
+ deps[dep.msvc_fake] = dep_kind
+
+ def get_linked_win_depends(self, target, deps, static_recurse=0):
+ """Find project dependencies for a DLL or EXE project"""
+
+ direct_deps = self.get_direct_depends(target)
+ for dep, dep_kind in direct_deps:
+ is_proj, is_lib, is_static = dep_kind
+
+ # add all top level dependencies
+ if not static_recurse or is_lib:
+ # We need to guard against linking both a static and a dynamic library
+ # into a project (this is mainly a concern for tests). To do this, for
+ # every dll dependency we first check to see if its corresponding
+ # static library is already in the list of dependencies. If it is,
+ # we don't add the dll to the list.
+ if is_lib and dep.msvc_export and not self.disable_shared:
+ static_dep = self.graph.get_sources(gen_base.DT_LINK, dep.name)[0]
+ if static_dep in deps:
+ continue
+ deps[dep] = dep_kind
+
+ # add any libraries that static library dependencies depend on
+ for dep, dep_kind in direct_deps:
+ is_proj, is_lib, is_static = dep_kind
+
+ # recurse for projectless dependencies
+ if not is_proj:
+ self.get_linked_win_depends(dep, deps, 0)
+
+ # also recurse into static library dependencies
+ elif is_static:
+ self.get_linked_win_depends(dep, deps, 1)
+
+ def get_win_defines(self, target, cfg):
+ "Return the list of defines for target"
+
+ fakedefines = ["WIN32","_WINDOWS","alloca=_alloca",
+ "_CRT_SECURE_NO_DEPRECATE=",
+ "_CRT_NONSTDC_NO_DEPRECATE=",
+ "_CRT_SECURE_NO_WARNINGS="]
+
+ if self.sqlite_inline:
+ fakedefines.append("SVN_SQLITE_INLINE")
+
+ if isinstance(target, gen_base.TargetApacheMod):
+ if target.name == 'mod_dav_svn':
+ fakedefines.extend(["AP_DECLARE_EXPORT"])
+
+ if target.name.find('ruby') == -1:
+ fakedefines.append("snprintf=_snprintf")
+
+ if isinstance(target, gen_base.TargetSWIG):
+ fakedefines.append("SWIG_GLOBAL")
+
+      # Expect rb_errinfo() to be available in Ruby 1.9+,
+ # rather than ruby_errinfo.
+ if (self.ruby_major_version > 1 or self.ruby_minor_version > 8):
+ fakedefines.extend(["HAVE_RB_ERRINFO"])
+
+ if cfg == 'Debug':
+ fakedefines.extend(["_DEBUG","SVN_DEBUG"])
+ elif cfg == 'Release':
+ fakedefines.append("NDEBUG")
+
+ if self.static_apr:
+ fakedefines.extend(["APR_DECLARE_STATIC", "APU_DECLARE_STATIC"])
+
+ # XXX: Check if db is present, and if so, let apr-util know
+ # XXX: This is a hack until the apr build system is improved to
+ # XXX: know these things for itself.
+ if self.bdb_lib:
+ fakedefines.append("APU_HAVE_DB=1")
+ fakedefines.append("SVN_LIBSVN_FS_LINKS_FS_BASE=1")
+
+ # check if they wanted nls
+ if self.enable_nls:
+ fakedefines.append("ENABLE_NLS")
+
+ # check for neon 0.26.x or newer
+ if self.neon_ver >= 26000:
+ fakedefines.append("SVN_NEON_0_26=1")
+
+ # check for neon 0.27.x or newer
+ if self.neon_ver >= 27000:
+ fakedefines.append("SVN_NEON_0_27=1")
+
+ # check for neon 0.28.x or newer
+ if self.neon_ver >= 28000:
+ fakedefines.append("SVN_NEON_0_28=1")
+
+ if self.serf_lib:
+ fakedefines.append("SVN_HAVE_SERF")
+ fakedefines.append("SVN_LIBSVN_CLIENT_LINKS_RA_SERF")
+
+ if self.neon_lib:
+ fakedefines.append("SVN_HAVE_NEON")
+ fakedefines.append("SVN_LIBSVN_CLIENT_LINKS_RA_NEON")
+
+ # check we have sasl
+ if self.sasl_path:
+ fakedefines.append("SVN_HAVE_SASL")
+
+ if target.name.endswith('svn_subr'):
+ fakedefines.append("SVN_USE_WIN32_CRASHHANDLER")
+
+ # use static linking to Expat
+ fakedefines.append("XML_STATIC")
+
+ return fakedefines
+
+ def get_win_includes(self, target):
+ "Return the list of include directories for target"
+
+ fakeincludes = [ self.path("subversion/include"),
+ self.path("subversion"),
+ self.apath(self.apr_path, "include"),
+ self.apath(self.apr_util_path, "include") ]
+
+ if target.name == 'mod_authz_svn':
+ fakeincludes.extend([ self.apath(self.httpd_path, "modules/aaa") ])
+
+ if isinstance(target, gen_base.TargetApacheMod):
+ fakeincludes.extend([ self.apath(self.apr_util_path, "xml/expat/lib"),
+ self.apath(self.httpd_path, "include"),
+ self.apath(self.bdb_path, "include") ])
+ elif isinstance(target, gen_base.TargetSWIG):
+ util_includes = "subversion/bindings/swig/%s/libsvn_swig_%s" \
+ % (target.lang,
+ gen_base.lang_utillib_suffix[target.lang])
+ fakeincludes.extend([ self.path("subversion/bindings/swig"),
+ self.path("subversion/bindings/swig/proxy"),
+ self.path("subversion/bindings/swig/include"),
+ self.path(util_includes) ])
+ else:
+ fakeincludes.extend([ self.apath(self.apr_util_path, "xml/expat/lib"),
+ self.apath(self.neon_path, "src"),
+ self.path("subversion/bindings/swig/proxy"),
+ self.apath(self.bdb_path, "include") ])
+
+ if self.libintl_path:
+ fakeincludes.append(self.apath(self.libintl_path, 'inc'))
+
+ if self.serf_lib:
+ fakeincludes.append(self.apath(self.serf_path))
+
+ if self.swig_libdir \
+ and (isinstance(target, gen_base.TargetSWIG)
+ or isinstance(target, gen_base.TargetSWIGLib)):
+ if self.swig_vernum >= 103028:
+ fakeincludes.append(self.apath(self.swig_libdir, target.lang))
+ if target.lang == 'perl':
+ # At least swigwin 1.3.38+ uses perl5 as directory name. Just add it
+ # to the list to make sure we don't break old versions
+ fakeincludes.append(self.apath(self.swig_libdir, 'perl5'))
+ else:
+ fakeincludes.append(self.swig_libdir)
+ if target.lang == "perl":
+ fakeincludes.extend(self.perl_includes)
+ if target.lang == "python":
+ fakeincludes.extend(self.python_includes)
+ if target.lang == "ruby":
+ fakeincludes.extend(self.ruby_includes)
+
+ fakeincludes.append(self.apath(self.zlib_path))
+
+ if self.sqlite_inline:
+ fakeincludes.append(self.apath(self.sqlite_path))
+ else:
+ fakeincludes.append(self.apath(self.sqlite_path, 'inc'))
+
+ if self.sasl_path:
+ fakeincludes.append(self.apath(self.sasl_path, 'include'))
+
+ if target.name == "libsvnjavahl" and self.jdk_path:
+ fakeincludes.append(os.path.join(self.jdk_path, 'include'))
+ fakeincludes.append(os.path.join(self.jdk_path, 'include', 'win32'))
+
+ return fakeincludes
+
+ def get_win_lib_dirs(self, target, cfg):
+ "Return the list of library directories for target"
+
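+    # Map Debug/Release onto the LibD/LibR configuration names used by the
+    # Expat project bundled with apr-util (see the expat lib dir below).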
+ libcfg = cfg.replace("Debug", "LibD").replace("Release", "LibR")
+
+ fakelibdirs = [ self.apath(self.bdb_path, "lib"),
+ self.apath(self.neon_path),
+ self.apath(self.zlib_path),
+ ]
+
+ if not self.sqlite_inline:
+ fakelibdirs.append(self.apath(self.sqlite_path, "lib"))
+
+ if self.sasl_path:
+ fakelibdirs.append(self.apath(self.sasl_path, "lib"))
+ if self.serf_lib:
+ fakelibdirs.append(self.apath(msvc_path_join(self.serf_path, cfg)))
+
+ fakelibdirs.append(self.apath(self.apr_path, cfg))
+ fakelibdirs.append(self.apath(self.apr_util_path, cfg))
+ fakelibdirs.append(self.apath(self.apr_util_path, 'xml', 'expat',
+ 'lib', libcfg))
+
+ if isinstance(target, gen_base.TargetApacheMod):
+ fakelibdirs.append(self.apath(self.httpd_path, cfg))
+ if target.name == 'mod_dav_svn':
+ fakelibdirs.append(self.apath(self.httpd_path, "modules/dav/main",
+ cfg))
+ if self.swig_libdir \
+ and (isinstance(target, gen_base.TargetSWIG)
+ or isinstance(target, gen_base.TargetSWIGLib)):
+ if target.lang == "perl" and self.perl_libdir:
+ fakelibdirs.append(self.perl_libdir)
+ if target.lang == "python" and self.python_libdir:
+ fakelibdirs.append(self.python_libdir)
+ if target.lang == "ruby" and self.ruby_libdir:
+ fakelibdirs.append(self.ruby_libdir)
+
+ return fakelibdirs
+
+ def get_win_libs(self, target, cfg):
+ "Return the list of external libraries needed for target"
+
+ dblib = None
+ if self.bdb_lib:
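+      # Debug configurations link against the 'd'-suffixed BDB import library.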
+ dblib = self.bdb_lib+(cfg == 'Debug' and 'd.lib' or '.lib')
+
+ if self.neon_lib:
+ neonlib = self.neon_lib+(cfg == 'Debug' and 'd.lib' or '.lib')
+
+ if self.serf_lib:
+ if self.serf_ver_maj == 1:
+ serflib = 'serf-1.lib'
+ else:
+ serflib = 'serf.lib'
+
+ zlib = (cfg == 'Debug' and 'zlibstatD.lib' or 'zlibstat.lib')
+ sasllib = None
+ if self.sasl_path:
+ sasllib = 'libsasl.lib'
+
+ if not isinstance(target, gen_base.TargetLinked):
+ return []
+
+ if isinstance(target, gen_base.TargetLib) and target.msvc_static:
+ return []
+
+ nondeplibs = target.msvc_libs[:]
+ nondeplibs.append(zlib)
+ if self.enable_nls:
+ if self.libintl_path:
+ nondeplibs.append(self.apath(self.libintl_path,
+ 'lib', 'intl3_svn.lib'))
+ else:
+ nondeplibs.append('intl3_svn.lib')
+
+ if isinstance(target, gen_base.TargetExe):
+ nondeplibs.append('setargv.obj')
+
+ if ((isinstance(target, gen_base.TargetSWIG)
+ or isinstance(target, gen_base.TargetSWIGLib))
+ and target.lang == 'perl'):
+ nondeplibs.append(self.perl_lib)
+
+ if ((isinstance(target, gen_base.TargetSWIG)
+ or isinstance(target, gen_base.TargetSWIGLib))
+ and target.lang == 'ruby'):
+ nondeplibs.append(self.ruby_lib)
+
+ for dep in self.get_win_depends(target, FILTER_LIBS):
+ nondeplibs.extend(dep.msvc_libs)
+
+ if dep.external_lib == '$(SVN_DB_LIBS)':
+ nondeplibs.append(dblib)
+
+ if dep.external_lib == '$(SVN_SQLITE_LIBS)' and not self.sqlite_inline:
+ nondeplibs.append('sqlite3.lib')
+
+ if self.neon_lib and dep.external_lib == '$(NEON_LIBS)':
+ nondeplibs.append(neonlib)
+
+ if self.serf_lib and dep.external_lib == '$(SVN_SERF_LIBS)':
+ nondeplibs.append(serflib)
+
+ if dep.external_lib == '$(SVN_SASL_LIBS)':
+ nondeplibs.append(sasllib)
+
+ if dep.external_lib == '$(SVN_APR_LIBS)':
+ nondeplibs.append(self.apr_lib)
+
+ if dep.external_lib == '$(SVN_APRUTIL_LIBS)':
+ nondeplibs.append(self.aprutil_lib)
+
+ if dep.external_lib == '$(SVN_XML_LIBS)':
+ nondeplibs.append('xml.lib')
+
+ return gen_base.unique(nondeplibs)
+
+ def get_win_sources(self, target, reldir_prefix=''):
+ "Return the list of source files that need to be compliled for target"
+
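+    # Collect sources keyed by source file so duplicates collapse to one entry.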
+ sources = { }
+
+ for obj in self.graph.get_sources(gen_base.DT_LINK, target.name):
+ if isinstance(obj, gen_base.Target):
+ continue
+
+ for src in self.graph.get_sources(gen_base.DT_OBJECT, obj):
+ if isinstance(src, gen_base.SourceFile):
+ if reldir_prefix:
+ if src.reldir:
+ reldir = reldir_prefix + '\\' + src.reldir
+ else:
+ reldir = reldir_prefix
+ else:
+ reldir = src.reldir
+ else:
+ reldir = ''
+ sources[src] = src, obj, reldir
+
+ return list(sources.values())
+
+ def write_file_if_changed(self, fname, new_contents):
+ """Rewrite the file if new_contents are different than its current content.
+
+ If you have your windows projects open and generate the projects
+ it's not a small thing for windows to re-read all projects so
+ only update those that have changed.
+ """
+
+ try:
+ old_contents = open(fname, 'rb').read()
+ except IOError:
+ old_contents = None
+ if old_contents != new_contents:
+ open(fname, 'wb').write(new_contents)
+ print("Wrote: %s" % fname)
+
+ def write_with_template(self, fname, tname, data):
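+    "Fill the EZT template tname with data and write the result to fname"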
+ fout = StringIO()
+
+ template = ezt.Template(compress_whitespace = 0)
+ template.parse_file(os.path.join('build', 'generator', tname))
+ template.generate(fout, data)
+ self.write_file_if_changed(fname, fout.getvalue())
+
+ def write_zlib_project_file(self, name):
+ if not self.zlib_path:
+ return
+ zlib_path = os.path.abspath(self.zlib_path)
+ self.move_proj_file(self.projfilesdir, name,
+ (('zlib_path', zlib_path),
+ ('zlib_sources',
+ glob.glob(os.path.join(zlib_path, '*.c'))
+ + glob.glob(os.path.join(zlib_path,
+ 'contrib/masmx86/*.c'))
+ + glob.glob(os.path.join(zlib_path,
+ 'contrib/masmx86/*.asm'))),
+ ('zlib_headers',
+ glob.glob(os.path.join(zlib_path, '*.h'))),
+ ('zlib_version', self.zlib_version),
+ ('project_guid', self.makeguid('zlib')),
+ ('use_ml', self.have_ml and 1 or None),
+ ))
+
+ def write_neon_project_file(self, name):
+ if self.without_neon:
+ return
+
+ neon_path = os.path.abspath(self.neon_path)
+ self.move_proj_file(self.neon_path, name,
+ (('neon_sources',
+ glob.glob(os.path.join(neon_path, 'src', '*.c'))),
+ ('neon_headers',
+ glob.glob(os.path.join(neon_path, 'src', '*.h'))),
+ ('expat_path',
+ os.path.join(os.path.abspath(self.apr_util_path),
+ 'xml', 'expat', 'lib')),
+ ('zlib_path', self.zlib_path
+ and os.path.abspath(self.zlib_path)),
+ ('openssl_path',
+ self.openssl_path
+ and os.path.abspath(self.openssl_path)),
+ ('project_guid', self.makeguid('neon')),
+ ))
+
+ def write_serf_project_file(self, name):
+ if not self.serf_lib:
+ return
+
+ serf_path = os.path.abspath(self.serf_path)
+ if self.serf_ver_maj == 1:
+ serflib = 'serf-1.lib'
+ else:
+ serflib = 'serf.lib'
+
+ self.move_proj_file(self.serf_path, name,
+ (('serf_sources',
+ glob.glob(os.path.join(serf_path, '*.c'))
+ + glob.glob(os.path.join(serf_path, 'auth', '*.c'))
+ + glob.glob(os.path.join(serf_path, 'buckets',
+ '*.c'))),
+ ('serf_headers',
+ glob.glob(os.path.join(serf_path, '*.h'))
+ + glob.glob(os.path.join(serf_path, 'auth', '*.h'))
+ + glob.glob(os.path.join(serf_path, 'buckets',
+ '*.h'))),
+ ('zlib_path', self.zlib_path
+ and os.path.abspath(self.zlib_path)),
+ ('openssl_path',
+ self.openssl_path
+ and os.path.abspath(self.openssl_path)),
+ ('apr_path', os.path.abspath(self.apr_path)),
+ ('apr_util_path', os.path.abspath(self.apr_util_path)),
+ ('project_guid', self.makeguid('serf')),
+ ('apr_static', self.static_apr),
+ ('serf_lib', serflib),
+ ))
+
+ def move_proj_file(self, path, name, params=()):
+ ### Move our slightly templatized pre-built project files into place --
+ ### these projects include zlib, neon, serf, locale, config, etc.
+
+ dest_file = os.path.join(path, name)
+ source_template = os.path.join('templates', name + '.ezt')
+ data = {
+ 'version' : self.vcproj_version,
+ 'configs' : self.configs,
+ 'platforms' : self.platforms
+ }
+ for key, val in params:
+ data[key] = val
+ self.write_with_template(dest_file, source_template, data)
+
+ def write(self):
+ "Override me when creating a new project type"
+
+ raise NotImplementedError
+
+ def _find_perl(self):
+ "Find the right perl library name to link swig bindings with"
+ self.perl_includes = []
+ self.perl_libdir = None
+ fp = os.popen('perl -MConfig -e ' + escape_shell_arg(
+ 'print "$Config{PERL_REVISION}$Config{PERL_VERSION}"'), 'r')
+ try:
+ line = fp.readline()
+ if line:
+ msg = 'Found installed perl version number.'
+ self.perl_lib = 'perl' + line.rstrip() + '.lib'
+ else:
+ msg = 'Could not detect perl version.'
+ self.perl_lib = 'perl56.lib'
+ print('%s\n Perl bindings will be linked with %s\n'
+ % (msg, self.perl_lib))
+ finally:
+ fp.close()
+
+ fp = os.popen('perl -MConfig -e ' + escape_shell_arg(
+ 'print $Config{archlib}'), 'r')
+ try:
+ line = fp.readline()
+ if line:
+ self.perl_libdir = os.path.join(line, 'CORE')
+ self.perl_includes = [os.path.join(line, 'CORE')]
+ finally:
+ fp.close()
+
+ def _find_ruby(self):
+ "Find the right Ruby library name to link swig bindings with"
+ self.ruby_includes = []
+ self.ruby_libdir = None
+ self.ruby_version = None
+ self.ruby_major_version = None
+ self.ruby_minor_version = None
+ proc = os.popen('ruby -rrbconfig -e ' + escape_shell_arg(
+ "puts Config::CONFIG['ruby_version'];"
+ "puts Config::CONFIG['LIBRUBY'];"
+ "puts Config::CONFIG['archdir'];"
+ "puts Config::CONFIG['libdir'];"), 'r')
+ try:
+ rubyver = proc.readline()[:-1]
+ if rubyver:
+ self.ruby_version = rubyver
+ self.ruby_major_version = string.atoi(self.ruby_version[0])
+ self.ruby_minor_version = string.atoi(self.ruby_version[2])
+ libruby = proc.readline()[:-1]
+ if libruby:
+ msg = 'Found installed ruby %s' % rubyver
+ self.ruby_lib = libruby
+ self.ruby_includes.append(proc.readline()[:-1])
+ self.ruby_libdir = proc.readline()[:-1]
+ else:
+ msg = 'Could not detect Ruby version, assuming 1.8.'
+ self.ruby_version = "1.8"
+ self.ruby_major_version = 1
+ self.ruby_minor_version = 8
+ self.ruby_lib = 'msvcrt-ruby18.lib'
+ print('%s\n Ruby bindings will be linked with %s\n'
+ % (msg, self.ruby_lib))
+ finally:
+ proc.close()
+
+ def _find_python(self):
+ "Find the appropriate options for creating SWIG-based Python modules"
+ self.python_includes = []
+ self.python_libdir = ""
+ try:
+ from distutils import sysconfig
+ inc = sysconfig.get_python_inc()
+ plat = sysconfig.get_python_inc(plat_specific=1)
+ self.python_includes.append(inc)
+ if inc != plat:
+ self.python_includes.append(plat)
+ self.python_libdir = self.apath(sysconfig.PREFIX, "libs")
+ except ImportError:
+ pass
+
+ def _find_jdk(self):
+ if not self.jdk_path:
+ jdk_ver = None
+ try:
+ try:
+ # Python >=3.0
+ import winreg
+ except ImportError:
+ # Python <3.0
+ import _winreg as winreg
+ key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
+ r"SOFTWARE\JavaSoft\Java Development Kit")
+ # Find the newest JDK version.
+ num_values = winreg.QueryInfoKey(key)[1]
+ for i in range(num_values):
+ (name, value, key_type) = winreg.EnumValue(key, i)
+ if name == "CurrentVersion":
+ jdk_ver = value
+ break
+
+ # Find the JDK path.
+ if jdk_ver is not None:
+ key = winreg.OpenKey(key, jdk_ver)
+ num_values = winreg.QueryInfoKey(key)[1]
+ for i in range(num_values):
+ (name, value, key_type) = winreg.EnumValue(key, i)
+ if name == "JavaHome":
+ self.jdk_path = value
+ break
+ winreg.CloseKey(key)
+ except (ImportError, EnvironmentError):
+ pass
+ if self.jdk_path:
+ print("Found JDK version %s in %s\n" % (jdk_ver, self.jdk_path))
+ else:
+ print("Using JDK in %s\n" % (self.jdk_path))
+
+ def _find_swig(self):
+    # SWIG 1.3.24 or newer is required; if SWIG cannot be found, assume 1.3.25.
+ default_version = '1.3.25'
+ minimum_version = '1.3.24'
+ vernum = 103025
+ minimum_vernum = 103024
+ libdir = ''
+
+ if self.swig_path is not None:
+ self.swig_exe = os.path.abspath(os.path.join(self.swig_path, 'swig'))
+ else:
+ self.swig_exe = 'swig'
+
+ try:
+ outfp = subprocess.Popen([self.swig_exe, '-version'], stdout=subprocess.PIPE, universal_newlines=True).stdout
+ txt = outfp.read()
+ if txt:
+ vermatch = re.compile(r'^SWIG\ Version\ (\d+)\.(\d+)\.(\d+)$', re.M) \
+ .search(txt)
+ else:
+ vermatch = None
+
+ if vermatch:
+ version = tuple(map(int, vermatch.groups()))
+ # build/ac-macros/swig.m4 explains the next incantation
+ vernum = int('%d%02d%03d' % version)
+ print('Found installed SWIG version %d.%d.%d\n' % version)
+ if vernum < minimum_vernum:
+ print('WARNING: Subversion requires version %s\n'
+ % minimum_version)
+
+ libdir = self._find_swig_libdir()
+ else:
+ print('Could not find installed SWIG,'
+ ' assuming version %s\n' % default_version)
+ self.swig_libdir = ''
+ outfp.close()
+ except OSError:
+ print('Could not find installed SWIG,'
+ ' assuming version %s\n' % default_version)
+ self.swig_libdir = ''
+
+ self.swig_vernum = vernum
+ self.swig_libdir = libdir
+
+ def _find_swig_libdir(self):
+ fp = os.popen(self.swig_exe + ' -swiglib', 'r')
+ try:
+ libdir = fp.readline().rstrip()
+ if libdir:
+ print('Using SWIG library directory %s\n' % libdir)
+ return libdir
+ else:
+ print('WARNING: could not find SWIG library directory\n')
+ finally:
+ fp.close()
+ return ''
+
+ def _find_ml(self):
+ "Check if the ML assembler is in the path"
+ if not self.enable_ml:
+ self.have_ml = 0
+ return
+ fp = os.popen('ml /help', 'r')
+ try:
+ line = fp.readline()
+ if line:
+ msg = 'Found ML, ZLib build will use ASM sources'
+ self.have_ml = 1
+ else:
+ msg = 'Could not find ML, ZLib build will not use ASM sources'
+ self.have_ml = 0
+ print('%s\n' % (msg,))
+ finally:
+ fp.close()
+
+ def _find_neon(self):
+ "Find the neon version"
+ msg = 'WARNING: Unable to determine neon version\n'
+ if self.without_neon:
+ self.neon_lib = None
+ msg = 'Not attempting to find neon\n'
+ else:
+ try:
+ self.neon_lib = "libneon"
+ fp = open(os.path.join(self.neon_path, '.version'))
+ txt = fp.read()
+ vermatch = re.compile(r'(\d+)\.(\d+)\.(\d+)$', re.M) \
+ .search(txt)
+
+ if vermatch:
+ version = tuple(map(int, vermatch.groups()))
+ # build/ac-macros/swig.m4 explains the next incantation
+ self.neon_ver = int('%d%02d%03d' % version)
+ msg = 'Found neon version %d.%d.%d\n' % version
+ if self.neon_ver < 25005:
+ msg = 'WARNING: Neon version 0.25.5 or higher is required'
+ except:
+ msg = 'WARNING: Error while determining neon version\n'
+ self.neon_lib = None
+
+ print(msg)
+
+ def _get_serf_version(self):
+ "Retrieves the serf version from serf.h"
+
+ # shouldn't be called unless serf is there
+ assert self.serf_path and os.path.exists(self.serf_path)
+
+ self.serf_ver_maj = None
+ self.serf_ver_min = None
+ self.serf_ver_patch = None
+
+ # serf.h should be present
+ if not os.path.exists(os.path.join(self.serf_path, 'serf.h')):
+ return None, None, None
+
+ txt = open(os.path.join(self.serf_path, 'serf.h')).read()
+
+ maj_match = re.search(r'SERF_MAJOR_VERSION\s+(\d+)', txt)
+ min_match = re.search(r'SERF_MINOR_VERSION\s+(\d+)', txt)
+ patch_match = re.search(r'SERF_PATCH_VERSION\s+(\d+)', txt)
+ if maj_match:
+ self.serf_ver_maj = int(maj_match.group(1))
+ if min_match:
+ self.serf_ver_min = int(min_match.group(1))
+ if patch_match:
+ self.serf_ver_patch = int(patch_match.group(1))
+
+ return self.serf_ver_maj, self.serf_ver_min, self.serf_ver_patch
+
+ def _find_serf(self):
+ "Check if serf and its dependencies are available"
+
+ minimal_serf_version = (0, 3, 0)
+ self.serf_lib = None
+ if self.serf_path and os.path.exists(self.serf_path):
+ if self.openssl_path and os.path.exists(self.openssl_path):
+ self.serf_lib = 'serf'
+ version = self._get_serf_version()
+ if None in version:
+          msg = 'Unknown serf version found, but will try to build ' \
+                'ra_serf.\n'
+ else:
+ self.serf_ver = '.'.join(str(v) for v in version)
+ if version < minimal_serf_version:
+ self.serf_lib = None
+ msg = 'Found serf %s, but >= %s is required. ra_serf will not be built.\n' % \
+ (self.serf_ver, '.'.join(str(v) for v in minimal_serf_version))
+ else:
+ msg = 'Found serf version %s\n' % self.serf_ver
+ print(msg)
+ else:
+ print('openssl not found, ra_serf will not be built\n')
+ else:
+ print('serf not found, ra_serf will not be built\n')
+
+ def _find_apr(self):
+ "Find the APR library and version"
+
+ version_file_path = os.path.join(self.apr_path, 'include',
+ 'apr_version.h')
+
+ if not os.path.exists(version_file_path):
+ sys.stderr.write("ERROR: '%s' not found.\n" % version_file_path);
+ sys.stderr.write("Use '--with-apr' option to configure APR location.\n");
+ sys.exit(1)
+
+ fp = open(version_file_path)
+ txt = fp.read()
+ fp.close()
+ vermatch = re.search(r'^\s*#define\s+APR_MAJOR_VERSION\s+(\d+)', txt, re.M)
+
+ major_ver = int(vermatch.group(1))
+
+ suffix = ''
+ if major_ver > 0:
+ suffix = '-%d' % major_ver
+
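+    # e.g. APR 1.x yields 'apr-1.lib' (static) or 'libapr-1.lib' (dynamic)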
+ if self.static_apr:
+ self.apr_lib = 'apr%s.lib' % suffix
+ else:
+ self.apr_lib = 'libapr%s.lib' % suffix
+
+ def _find_apr_util(self):
+ "Find the APR-util library and version"
+
+ version_file_path = os.path.join(self.apr_util_path, 'include',
+ 'apu_version.h')
+
+ if not os.path.exists(version_file_path):
+ sys.stderr.write("ERROR: '%s' not found.\n" % version_file_path);
+ sys.stderr.write("Use '--with-apr-util' option to configure APR-Util location.\n");
+ sys.exit(1)
+
+ fp = open(version_file_path)
+ txt = fp.read()
+ fp.close()
+ vermatch = re.search(r'^\s*#define\s+APU_MAJOR_VERSION\s+(\d+)', txt, re.M)
+
+ major_ver = int(vermatch.group(1))
+
+ suffix = ''
+ if major_ver > 0:
+ suffix = '-%d' % major_ver
+
+ if self.static_apr:
+ self.aprutil_lib = 'aprutil%s.lib' % suffix
+ else:
+ self.aprutil_lib = 'libaprutil%s.lib' % suffix
+
+ def _find_sqlite(self):
+ "Find the Sqlite library and version"
+
+ header_file = os.path.join(self.sqlite_path, 'inc', 'sqlite3.h')
+
+ # First check for compiled version of SQLite.
+ if os.path.exists(header_file):
+      # Compiled SQLite found; check for the sqlite3.lib file.
+ lib_file = os.path.join(self.sqlite_path, 'lib', 'sqlite3.lib')
+ if not os.path.exists(lib_file):
+ sys.stderr.write("ERROR: '%s' not found.\n" % lib_file)
+ sys.stderr.write("Use '--with-sqlite' option to configure sqlite location.\n");
+ sys.exit(1)
+ self.sqlite_inline = False
+ else:
+ # Compiled SQLite not found. Try amalgamation version.
+ amalg_file = os.path.join(self.sqlite_path, 'sqlite3.c')
+ if not os.path.exists(amalg_file):
+ sys.stderr.write("ERROR: SQLite not found in '%s' directory.\n" % self.sqlite_path)
+ sys.stderr.write("Use '--with-sqlite' option to configure sqlite location.\n");
+ sys.exit(1)
+ header_file = os.path.join(self.sqlite_path, 'sqlite3.h')
+ self.sqlite_inline = True
+
+ fp = open(header_file)
+ txt = fp.read()
+ fp.close()
+ vermatch = re.search(r'^\s*#define\s+SQLITE_VERSION\s+"(\d+)\.(\d+)\.(\d+)(?:\.\d)?"', txt, re.M)
+
+ version = tuple(map(int, vermatch.groups()))
+
+ self.sqlite_version = '%d.%d.%d' % version
+
+ msg = 'Found SQLite version %s\n'
+
+ major, minor, patch = version
+ if major < 3 or (major == 3 and minor < 6) \
+ or (major == 3 and minor == 6 and patch < 18):
+ sys.stderr.write("ERROR: SQLite 3.6.18 or higher is required "
+ "(%s found)\n" % self.sqlite_version);
+ sys.exit(1)
+ else:
+ print(msg % self.sqlite_version)
+
+ def _find_zlib(self):
+ "Find the ZLib library and version"
+
+ if not self.zlib_path:
+ self.zlib_version = '1'
+ return
+
+ header_file = os.path.join(self.zlib_path, 'zlib.h')
+
+ if not os.path.exists(header_file):
+ self.zlib_version = '1'
+ return
+
+ fp = open(header_file)
+ txt = fp.read()
+ fp.close()
+ vermatch = re.search(r'^\s*#define\s+ZLIB_VERSION\s+"(\d+)\.(\d+)\.(\d+)(?:\.\d)?"', txt, re.M)
+
+ version = tuple(map(int, vermatch.groups()))
+
+ self.zlib_version = '%d.%d.%d' % version
+
+ msg = 'Found ZLib version %s\n'
+
+ print(msg % self.zlib_version)
+
+class ProjectItem:
+  "A generic item class for holding source info, config info, etc. for a project"
+ def __init__(self, **kw):
+ vars(self).update(kw)
+
+# ============================================================================
+# This is a cut-down and modified version of code from:
+# subversion/subversion/bindings/swig/python/svn/core.py
+#
+if sys.platform == "win32":
+ _escape_shell_arg_re = re.compile(r'(\\+)(\"|$)')
+
+ def escape_shell_arg(arg):
+ # The (very strange) parsing rules used by the C runtime library are
+ # described at:
+ # http://msdn.microsoft.com/library/en-us/vclang/html/_pluslang_Parsing_C.2b2b_.Command.2d.Line_Arguments.asp
+
+ # double up slashes, but only if they are followed by a quote character
+ arg = re.sub(_escape_shell_arg_re, r'\1\1\2', arg)
+
+ # surround by quotes and escape quotes inside
+ arg = '"' + arg.replace('"', '"^""') + '"'
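+    # e.g. escape_shell_arg('foo"bar') returns '"foo"^""bar"'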
+ return arg
+
+else:
+ def escape_shell_arg(str):
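+    # e.g. escape_shell_arg("it's") returns 'it'\''s'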
+ return "'" + str.replace("'", "'\\''") + "'"
+
+# ============================================================================
+
+FILTER_LIBS = 1
+FILTER_PROJECTS = 2
+
+class POFile:
+ "Item class for holding po file info"
+ def __init__(self, base):
+ self.po = base + '.po'
+ self.spo = base + '.spo'
+ self.mo = base + '.mo'
+
+# MSVC paths always use backslashes regardless of current platform
+def msvc_path(path):
+ """Convert a build path to an msvc path"""
+ return path.replace('/', '\\')
+
+def msvc_path_join(*path_parts):
+ """Join path components into an msvc path"""
+ return '\\'.join(path_parts)
diff --git a/build/generator/swig/__init__.py b/build/generator/swig/__init__.py
new file mode 100644
index 0000000..c0eae6a
--- /dev/null
+++ b/build/generator/swig/__init__.py
@@ -0,0 +1,77 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# generator.swig: Base class for SWIG-related generators
+#
+
+import os
+import re
+import shutil
+import generator.util.executable as _exec
+from generator.gen_base import _collect_paths
+try:
+ # Python >=3.0
+ import configparser
+except ImportError:
+ # Python <3.0
+ import ConfigParser as configparser
+
+class Generator:
+ """Base class for SWIG-related generators"""
+ langs = ["python", "perl", "ruby"]
+ short = { "perl": "pl", "python": "py", "ruby": "rb" }
+
+ def __init__(self, conf, swig_path):
+ """Read build.conf"""
+
+ # Now read and parse build.conf
+ parser = configparser.ConfigParser()
+ parser.read(conf)
+
+ # Read configuration options
+ self.proxy_dir = parser.get('options', 'swig-proxy-dir')
+ self.includes = _collect_paths(parser.get('options', 'includes'))
+ self.swig_checkout_files = \
+ _collect_paths(parser.get('options', 'swig-checkout-files'))
+
+ # Calculate build options
+ self.opts = {}
+ for lang in self.langs:
+ self.opts[lang] = parser.get('options', 'swig-%s-opts' % lang)
+
+ # Calculate SWIG paths
+ self.swig_path = swig_path
+ try:
+ self.swig_libdir = _exec.output([self.swig_path, "-swiglib"], strip=1)
+ except AssertionError:
+ pass
+
+ def version(self):
+ """Get the version number of SWIG"""
+ try:
+ swig_version = _exec.output([self.swig_path, "-version"])
+      m = re.search(r"Version (\d+)\.(\d+)\.(\d+)", swig_version)
+      if m:
+        # return ints so comparisons such as version() < (1, 3, 26) work as intended
+        return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
+ except AssertionError:
+ pass
+ return (0, 0, 0)
+
diff --git a/build/generator/swig/checkout_swig_header.py b/build/generator/swig/checkout_swig_header.py
new file mode 100755
index 0000000..3aa8352
--- /dev/null
+++ b/build/generator/swig/checkout_swig_header.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Checkout files from the SWIG library into Subversion's proxy directory
+#
+
+import sys, os, re, fileinput, shutil
+if __name__ == "__main__":
+ parent_dir = os.path.dirname(os.path.abspath(os.path.dirname(sys.argv[0])))
+ sys.path[0:0] = [ parent_dir, os.path.dirname(parent_dir) ]
+import generator.swig
+from gen_base import build_path_splitfile, build_path_join
+from generator.util.executable import run
+
+class Generator(generator.swig.Generator):
+
+ def write(self):
+ """Checkout all files"""
+ for path in self.swig_checkout_files:
+ self.checkout(path)
+
+ def write_makefile_rules(self, makefile):
+ """Write makefile rules to checkout files"""
+ script_path = '$(top_srcdir)/build/generator/swig/checkout_swig_header.py'
+ conf = '$(abs_srcdir)/build.conf'
+ makefile.write('CHECKOUT_SWIG = cd $(top_builddir) && $(PYTHON)' +
+ ' %s %s $(SWIG)\n\n' % (script_path, conf))
+ checkout_locations = []
+ for path in self.swig_checkout_files:
+ out = self._output_file(path)
+ checkout_locations.append(out)
+ makefile.write('%s: %s\n' % (out, script_path) +
+ '\t$(CHECKOUT_SWIG) %s\n\n' % path)
+ makefile.write('SWIG_CHECKOUT_FILES = %s\n\n\n'
+ % " ".join(checkout_locations))
+
+ def checkout(self, path):
+ """Checkout a specific header file from SWIG"""
+ out = self._output_file(path)
+ if os.path.exists(out):
+ os.remove(out)
+ if self._skip_checkout(path):
+ open(out, "w")
+ elif self.version() == (1, 3, 24):
+ shutil.copy(build_path_join(self.swig_libdir, path), out)
+ else:
+ run("%s -o %s -co %s" % (self.swig_path, out, path))
+
+ def _skip_checkout(self, path):
+ """Should we skip this checkout?"""
+    return ((path == "ruby/rubytracking.swg" and self.version() < (1, 3, 26)) or
+            (path == "common.swg" and self.version() > (1, 3, 24)))
+
+ def _output_file(self, path):
+ """Get output filename"""
+ dir, filename = build_path_splitfile(path)
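+    # the directory part of 'path' is dropped; files land flat in proxy_dir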
+ return build_path_join(self.proxy_dir, filename)
+
+if __name__ == "__main__":
+ if len(sys.argv) != 4:
+    print("Usage: %s build.conf swig file.swg" % sys.argv[0])
+ print("Checkout a specific header file from SWIG's library into")
+ print("the Subversion proxy directory.")
+ else:
+ gen = Generator(sys.argv[1], sys.argv[2])
+ gen.checkout(sys.argv[3])
diff --git a/build/generator/swig/external_runtime.py b/build/generator/swig/external_runtime.py
new file mode 100755
index 0000000..bbf58fc
--- /dev/null
+++ b/build/generator/swig/external_runtime.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+#
+# external_runtime.py: Generate external runtime files for SWIG
+#
+
+import sys, os, re, fileinput
+if __name__ == "__main__":
+ parent_dir = os.path.dirname(os.path.abspath(os.path.dirname(sys.argv[0])))
+ sys.path[0:0] = [ parent_dir, os.path.dirname(parent_dir) ]
+import generator.swig
+import generator.util.executable
+_exec = generator.util.executable
+
+class Generator(generator.swig.Generator):
+ """Generate external runtime files for SWIG"""
+
+ def write(self):
+ """Generate external runtimes"""
+ for lang in self.langs:
+ self.write_external_runtime(lang)
+
+ def write_makefile_rules(self, makefile):
+ """Write the makefile rules for generating external runtimes"""
+ makefile.write(
+ 'GEN_SWIG_RUNTIME = cd $(top_srcdir) && $(PYTHON)' +
+ ' build/generator/swig/external_runtime.py build.conf $(SWIG)\n\n'
+ )
+ for lang in self.langs:
+ out = self._output_file(lang)
+ makefile.write(
+ 'autogen-swig-%s: %s\n' % (self.short[lang], out) +
+ '%s: $(SWIG_CHECKOUT_FILES)\n' % out +
+ '\t$(GEN_SWIG_RUNTIME) %s\n\n' % lang
+ )
+ makefile.write('\n')
+
+ def write_external_runtime(self, lang):
+ """Generate external runtime header files for each SWIG language"""
+
+ # Runtime library names
+ runtime_library = {
+ "python": "pyrun.swg", "perl":"perlrun.swg", "ruby":"rubydef.swg"
+ }
+
+ # Build runtime files
+ out = self._output_file(lang)
+ if self.version() == (1, 3, 24):
+ out_file = open(out, "w")
+ out_file.write(open("%s/swigrun.swg" % self.proxy_dir).read())
+ out_file.write(open("%s/common.swg" % self.proxy_dir).read())
+ out_file.write(
+ open("%s/%s" % (self.proxy_dir, runtime_library[lang])).read())
+ if lang != "ruby":
+ out_file.write(open("%s/runtime.swg" % self.proxy_dir).read())
+ out_file.close()
+ else:
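+      # e.g. runs: swig -python -external-runtime <proxy_dir>/swig_python_external_runtime.swg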
+ _exec.run("%s -%s -external-runtime %s" % (self.swig_path, lang, out))
+
+ # SWIG 1.3.24-27 should include rubyhead.swg in their
+ # external runtime, but they don't.
+ if lang == "ruby" and self.version() < (1, 3, 28):
+ runtime = open(out).read()
+ out_file = open(out, "w")
+ head = open("%s/rubyhead.swg" % self.proxy_dir).read();
+ out_file.write(head)
+ if self.version() >= (1, 3, 26):
+ # SWIG 1.3.26-27 should include rubytracking.swg in their
+ # external runtime, but they don't.
+ tracking = open("%s/rubytracking.swg" % self.proxy_dir).read();
+ out_file.write(tracking)
+ out_file.write(runtime)
+ out_file.close()
+
+ # SWIG 1.3.25 and earlier use the wrong number of arguments in calls to
+ # SWIG_GetModule. We fix this below.
+ if self.version() <= (1, 3, 25):
+ for line in fileinput.input(out, inplace=1):
+ sys.stdout.write(
+ re.sub(r"SWIG_GetModule\(\)", "SWIG_GetModule(NULL)", line)
+ )
+
+  def _output_file(self, lang):
+ """Return the output filename of the runtime for the given language"""
+ return '%s/swig_%s_external_runtime.swg' % (self.proxy_dir, lang)
+
+
+if __name__ == "__main__":
+ if len(sys.argv) != 4:
+    print("Usage: %s build.conf swig language" % sys.argv[0])
+ print("Generates external runtime files for SWIG")
+ else:
+ gen = Generator(sys.argv[1], sys.argv[2])
+ gen.write_external_runtime(sys.argv[3])
diff --git a/build/generator/swig/header_wrappers.py b/build/generator/swig/header_wrappers.py
new file mode 100755
index 0000000..2f3d200
--- /dev/null
+++ b/build/generator/swig/header_wrappers.py
@@ -0,0 +1,354 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+#
+# header_wrappers.py: Generates SWIG proxy wrappers around Subversion
+# header files
+#
+
+import os, re, sys, glob, shutil, tempfile
+if __name__ == "__main__":
+ parent_dir = os.path.dirname(os.path.abspath(os.path.dirname(sys.argv[0])))
+ sys.path[0:0] = [ parent_dir, os.path.dirname(parent_dir) ]
+from gen_base import unique, native_path, build_path_basename, build_path_join
+import generator.swig
+
+class Generator(generator.swig.Generator):
+ """Generate SWIG proxy wrappers around Subversion header files"""
+
+ def __init__(self, conf, swig_path):
+ """Initialize Generator object"""
+ generator.swig.Generator.__init__(self, conf, swig_path)
+
+ # Build list of header files
+ self.header_files = list(map(native_path, self.includes))
+ self.header_basenames = list(map(os.path.basename, self.header_files))
+
+ # Ignore svn_repos_parse_fns_t because SWIG can't parse it
+ _ignores = ["svn_repos_parse_fns_t",
+ "svn_auth_gnome_keyring_unlock_prompt_func_t",
+ ]
+
+ def write_makefile_rules(self, makefile):
+ """Write makefile rules for generating SWIG wrappers for Subversion
+ header files."""
+ wrapper_fnames = []
+ python_script = '$(abs_srcdir)/build/generator/swig/header_wrappers.py'
+ makefile.write('GEN_SWIG_WRAPPER = cd $(top_srcdir) && $(PYTHON)' +
+ ' %s build.conf $(SWIG)\n\n' % python_script)
+ for fname in self.includes:
+ wrapper_fname = build_path_join(self.proxy_dir,
+ self.proxy_filename(build_path_basename(fname)))
+ wrapper_fnames.append(wrapper_fname)
+ makefile.write(
+ '%s: %s %s\n' % (wrapper_fname, fname, python_script) +
+ '\t$(GEN_SWIG_WRAPPER) %s\n\n' % fname
+ )
+ makefile.write('SWIG_WRAPPERS = %s\n\n' % ' '.join(wrapper_fnames))
+ for short_name in self.short.values():
+ # swig-pl needs the '.swig_checked' target here; swig-rb and swig-py
+ # already reach it via a different dependency chain:
+ #
+ # In build-outputs.mk, swig-py and swig-rb targets depend on *.la
+ # targets, which depend on *.lo targets, which depend on *.c targets,
+ # which depend on .swig_checked target.
+ makefile.write('autogen-swig-%s: .swig_checked $(SWIG_WRAPPERS)\n' % short_name)
+ makefile.write('\n\n')
+
+ def proxy_filename(self, include_filename):
+ """Convert a .h filename into a _h.swg filename"""
+ return include_filename.replace(".h","_h.swg")
+
+ def _write_nodefault_calls(self, structs):
+ """Write proxy definitions to a SWIG interface file"""
+ self.ofile.write("\n/* No default constructors for opaque structs */\n")
+ self.ofile.write('#ifdef SWIGPYTHON\n');
+ for structName, structDefinition in structs:
+ if not structDefinition:
+ self.ofile.write('%%nodefault %s;\n' % structName)
+ self.ofile.write('#endif\n');
+
+ def _write_includes(self, includes, base_fname):
+ """Write includes to a SWIG interface file"""
+
+ self.ofile.write('\n/* Includes */\n')
+ self.ofile.write('%%{\n#include "%s"\n%%}\n' % base_fname)
+ if base_fname not in self._ignores:
+ self.ofile.write('%%include %s\n' % base_fname)
+
+
+ def _write_callback(self, type, return_type, module, function, params,
+ callee):
+ """Write out an individual callback"""
+
+ # Get rid of any extra spaces or newlines
+ return_type = ' '.join(return_type.split())
+ params = ' '.join(params.split())
+
+ # Calculate parameters
+ if params == "void":
+ param_names = ""
+ params = "%s _obj" % type
+ else:
+ param_names = ", ".join(self._re_param_names.findall(params))
+ params = "%s _obj, %s" % (type, params)
+
+ invoke_callback = "%s(%s)" % (callee, param_names)
+ if return_type != "void":
+ invoke_callback = "return %s" % (invoke_callback)
+
+ # Write out the declaration
+ self.ofile.write(
+ "static %s %s_invoke_%s(\n" % (return_type, module, function) +
+ " %s) {\n" % params +
+ " %s;\n" % invoke_callback +
+ "}\n\n")
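+    # The generated invoker has the shape:
+    #   static <return_type> <module>_invoke_<function>(
+    #     <type> _obj, <params>) {
+    #     [return ]<callee>(<param_names>);
+    #   }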
+
+
+ def _write_callback_typemaps(self, callbacks):
+ """Apply the CALLABLE_CALLBACK typemap to all callbacks"""
+
+ self.ofile.write('\n/* Callback typemaps */\n')
+ types = [];
+ for match in callbacks:
+ if match[0] and match[1]:
+ # Callbacks declared as a typedef
+ return_type, module, function, params = match
+ type = "%s_%s_t" % (module, function)
+ types.append(type)
+
+ if types:
+ self.ofile.write(
+ "#ifdef SWIGPYTHON\n"
+ "%%apply CALLABLE_CALLBACK {\n"
+ " %s\n"
+ "};\n"
+ "%%apply CALLABLE_CALLBACK * {\n"
+ " %s *\n"
+ "};\n"
+ "#endif\n" % ( ",\n ".join(types), " *,\n ".join(types) )
+ );
+
+
+ def _write_baton_typemaps(self, batons):
+ """Apply the PY_AS_VOID typemap to all batons"""
+
+ self.ofile.write('\n/* Baton typemaps */\n')
+
+ if batons:
+ self.ofile.write(
+ "#ifdef SWIGPYTHON\n"
+ "%%apply void *PY_AS_VOID {\n"
+ " void *%s\n"
+ "};\n"
+ "#endif\n" % ( ",\n void *".join(batons) )
+ )
+
+
+ def _write_callbacks(self, callbacks):
+ """Write invoker functions for callbacks"""
+ self.ofile.write('\n/* Callbacks */\n')
+ self.ofile.write("\n%inline %{\n")
+
+ struct = None
+ for match in callbacks:
+ if match[0] and not match[1]:
+ # Struct definitions
+ struct = match[0]
+ elif not match[0] and struct not in self._ignores:
+ # Struct member callbacks
+ return_type, name, params = match[1:]
+ type = "%s *" % struct
+
+ self._write_callback(type, return_type, struct[:-2], name, params,
+ "(_obj->%s)" % name)
+ elif match[0] and match[1]:
+ # Callbacks declared as a typedef
+ return_type, module, function, params = match
+ type = "%s_%s_t" % (module, function)
+
+ if type not in self._ignores:
+ self._write_callback(type, return_type, module, function, params,
+ "_obj")
+
+ self.ofile.write("%}\n")
+
+ self.ofile.write("\n#ifdef SWIGPYTHON\n")
+ for match in callbacks:
+
+ if match[0] and not match[1]:
+ # Struct definitions
+ struct = match[0]
+ elif not match[0] and struct not in self._ignores:
+ # Using funcptr_member_proxy, add proxy methods to anonymous
+ # struct member callbacks, so that they can be invoked directly.
+ return_type, name, params = match[1:]
+ self.ofile.write('%%funcptr_member_proxy(%s, %s, %s_invoke_%s);\n'
+ % (struct, name, struct[:-2], name))
+ elif match[0] and match[1]:
+ # Using funcptr_proxy, create wrapper objects for each typedef'd
+ # callback, so that they can be invoked directly. The
+ # CALLABLE_CALLBACK typemap (used in _write_callback_typemaps)
+ # ensures that these wrapper objects are actually used.
+ return_type, module, function, params = match
+ self.ofile.write('%%funcptr_proxy(%s_%s_t, %s_invoke_%s);\n'
+ % (module, function, module, function))
+ self.ofile.write("\n#endif\n")
+
+ def _write_proxy_definitions(self, structs):
+ """Write proxy definitions to a SWIG interface file"""
+ self.ofile.write('\n/* Structure definitions */\n')
+ self.ofile.write('#ifdef SWIGPYTHON\n');
+ for structName, structDefinition in structs:
+ if structDefinition:
+ self.ofile.write('%%proxy(%s);\n' % structName)
+ else:
+ self.ofile.write('%%opaque_proxy(%s);\n' % structName)
+ self.ofile.write('#endif\n');
+
+ """Regular expression for parsing includes from a C header file"""
+ _re_includes = re.compile(r'#\s*include\s*[<"]([^<">;\s]+)')
+
+ """Regular expression for parsing structs from a C header file"""
+ _re_structs = re.compile(r'\btypedef\s+(?:struct|union)\s+'
+ r'(svn_[a-z_0-9]+)\b\s*(\{?)')
+
+ """Regular expression for parsing callbacks declared inside structs
+ from a C header file"""
+ _re_struct_callbacks = re.compile(r'\btypedef\s+(?:struct|union)\s+'
+ r'(svn_[a-z_0-9]+)\b|'
+ r'\n[ \t]+((?!typedef)[a-z_0-9\s*]+)'
+ r'\(\*(\w+)\)'
+ r'\s*\(([^)]+)\);')
+
+
+ """Regular expression for parsing callbacks declared as a typedef
+ from a C header file"""
+ _re_typed_callbacks = re.compile(r'typedef\s+([a-z_0-9\s*]+)'
+ r'\(\*(svn_[a-z]+)_([a-z_0-9]+)_t\)\s*'
+ r'\(([^)]+)\);');
+
+ """Regular expression for parsing batons"""
+ _re_batons = re.compile(r'void\s*\*\s*(\w*baton\w*)');
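+  # e.g. matches "void *edit_baton" and captures "edit_baton"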
+
+ """Regular expression for parsing parameter names from a parameter list"""
+ _re_param_names = re.compile(r'\b(\w+)\s*\)*\s*(?:,|$)')
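+  # e.g. "svn_fs_t *fs, const char *path" yields ['fs', 'path']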
+
+ """Regular expression for parsing comments"""
+ _re_comments = re.compile(r'/\*.*?\*/')
+
+ def _write_swig_interface_file(self, base_fname, batons, includes, structs,
+ callbacks):
+ """Convert a header file into a SWIG header file"""
+
+ # Calculate output filename from base filename
+ output_fname = os.path.join(self.proxy_dir,
+ self.proxy_filename(base_fname))
+
+ # Open a temporary output file
+ self.ofile = tempfile.TemporaryFile(dir=self.proxy_dir)
+ self.ofile.write('/* Proxy classes for %s\n' % base_fname)
+ self.ofile.write(' * DO NOT EDIT -- AUTOMATICALLY GENERATED */\n')
+
+ # Write list of structs for which we shouldn't define constructors
+ # by default
+ self._write_nodefault_calls(structs)
+
+ # Write typemaps for the callbacks
+ self._write_callback_typemaps(callbacks)
+
+ # Write typemaps for the batons
+ self._write_baton_typemaps(batons)
+
+ # Write includes into the SWIG interface file
+ self._write_includes(includes, base_fname)
+
+ # Write proxy definitions into the SWIG interface file
+ self._write_proxy_definitions(structs)
+
+ # Write callback definitions into the SWIG interface file
+ self._write_callbacks(callbacks)
+
+ # Copy the temporary file over to the result file.
+ # Ideally we'd simply rename the temporary file to output_fname,
+ # but NamedTemporaryFile() only supports its 'delete' parameter
+ # in python 2.6 and above, and renaming the file while it's opened
+ # exclusively is probably not a good idea.
+ outputfile = open(output_fname, 'w')
+ self.ofile.seek(0)
+ shutil.copyfileobj(self.ofile, outputfile)
+
+ # Close our temporary file.
+ # It will also be deleted automatically.
+ self.ofile.close()
+
+ # Close our output file, too.
+ outputfile.close()
+
+ def process_header_file(self, fname):
+ """Generate a wrapper around a header file"""
+
+ # Read the contents of the header file
+ contents = open(fname).read()
+
+ # Remove comments
+ contents = self._re_comments.sub("", contents)
+
+ # Get list of includes
+ includes = unique(self._re_includes.findall(contents))
+
+ # Get list of structs
+ structs = unique(self._re_structs.findall(contents))
+
+ # Get list of batons
+ batons = unique(self._re_batons.findall(contents))
+
+ # Get list of callbacks
+ callbacks = (self._re_struct_callbacks.findall(contents) +
+ self._re_typed_callbacks.findall(contents))
+
+ # Get the location of the output file
+ base_fname = os.path.basename(fname)
+
+ # Write the SWIG interface file
+ self._write_swig_interface_file(base_fname, batons, includes, structs,
+ callbacks)
+
+ def write(self):
+ """Generate wrappers for all header files"""
+
+ for fname in self.header_files:
+ self.process_header_file(fname)
+
+if __name__ == "__main__":
+ if len(sys.argv) < 3:
+ print("""Usage: %s build.conf swig [ subversion/include/header_file.h ]
+Generates SWIG proxy wrappers around Subversion header files. If no header
+files are specified, generate wrappers for subversion/include/*.h. """ % \
+ os.path.basename(sys.argv[0]))
+ else:
+ gen = Generator(sys.argv[1], sys.argv[2])
+ if len(sys.argv) > 3:
+ for fname in sys.argv[3:]:
+ gen.process_header_file(fname)
+ else:
+ gen.write()
diff --git a/build/generator/templates/build_locale.ezt b/build/generator/templates/build_locale.ezt
new file mode 100644
index 0000000..53e7e6c
--- /dev/null
+++ b/build/generator/templates/build_locale.ezt
@@ -0,0 +1,36 @@
+@rem Licensed to the Apache Software Foundation (ASF) under one
+@rem or more contributor license agreements. See the NOTICE file
+@rem distributed with this work for additional information
+@rem regarding copyright ownership. The ASF licenses this file
+@rem to you under the Apache License, Version 2.0 (the
+@rem "License"); you may not use this file except in compliance
+@rem with the License. You may obtain a copy of the License at
+@rem
+@rem http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing,
+@rem software distributed under the License is distributed on an
+@rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@rem KIND, either express or implied. See the License for the
+@rem specific language governing permissions and limitations
+@rem under the License.
+
+
+@echo off
+@rem **************************************************************************
+cd ..\..\subversion\po
+mkdir ..\..\%1\mo
+set exitcode=0
+[for pofiles]echo Running msgfmt on [pofiles.po]...
+python ..\..\build\strip-po-charset.py [pofiles.po] [pofiles.spo]
+if not errorlevel 0 goto err
+msgfmt.exe -c -o ..\..\%1\mo\[pofiles.mo] [pofiles.spo]
+if not errorlevel 0 goto err
+del [pofiles.spo]
+[end]
+goto end
+@rem **************************************************************************
+:err
+set exitcode=1
+:end
+exit %exitcode%
diff --git a/build/generator/templates/build_zlib.ezt b/build/generator/templates/build_zlib.ezt
new file mode 100644
index 0000000..ec0e2f6
--- /dev/null
+++ b/build/generator/templates/build_zlib.ezt
@@ -0,0 +1,134 @@
+[define COPYRIGHT]
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+[end]@echo off
+@rem **************************************************************************
+@rem * --== WARNING ==-- This is a generated file. Do not edit!
+@rem *
+@rem * From this directory, run this batch file like so:
+@rem *
+@rem * .\build_zlib debug|release Win32|X64 [rebuild|clean]
+@rem *
+@rem **************************************************************************
+
+@rem **************************************************************************
+cd /D [zlib_path]
+set exitcode=0
+set zlib_version=[zlib_version]
+
+if /i "%1" == "release" goto release
+if /i "%1" == "debug" goto debug
+goto pIerr
+
+:checkplatform
+if /i "%2" == "Win32" goto PWin32
+if /i "%2" == "x64" goto PX64
+goto pIIerr
+
+:checkrebuild
+
+[if-any use_ml]
+@rem **************************************************************************
+@rem Compile ASM sources with ML
+set ASFLAGS=-nologo -Zi -coff
+set LOC=-DASMV -DASMINF
+[is zlib_version "1.2.4"]
+set OBJA=contrib\masmx86\gvmat32c.obj contrib\masmx86\gvmat32.obj contrib\masmx86\inffas32.obj
+set ASM_OPTS=ASFLAGS="%ASFLAGS%" LOC="%LOC%" OBJA="%OBJA%"
+[else]
+if /i "%2" == "Win32" (
+ set ASM_OPTS=LOC="-DASMV -DASMINF" OBJA="inffas32.obj match686.obj"
+) else if /i "%2" == "x64" (
+ set ASM_OPTS=LOC="-DASMV -DASMINF" OBJA="inffasx64.obj gvmat64.obj inffas8664.obj" AS=ml64
+)
+[end]
+[end]
+
+
+if /i "%3" == "rebuild" goto rebuild
+if /i "%3" == "clean" goto clean
+if not "%3" == "" goto pIIIerr
+set target= %STATICLIB%
+goto build
+
+:rebuild
+set target=clean %STATICLIB%
+goto build
+
+:clean
+set target=clean
+goto build
+
+@rem **************************************************************************
+:release
+set STATICLIB=zlibstat.lib
+set CC_OPTS=/MD /O2 /Zi
+goto checkplatform
+
+@rem **************************************************************************
+:debug
+set STATICLIB=zlibstatD.lib
+set CC_OPTS=/MDd /Gm /ZI /Od /GZ /D_DEBUG
+goto checkplatform
+
+@rem **************************************************************************
+:PWin32
+goto checkrebuild
+
+@rem **************************************************************************
+:PX64
+goto checkrebuild
+
+@rem **************************************************************************
+:build
+set COMMON_CC_OPTS=/nologo /W3 /DWIN32 /D_WINDOWS
+[is zlib_version "1.2.4"]
+set CFLAGS=%COMMON_CC_OPTS% %CC_OPTS% %LOC%
+set BUILD_OPTS=%ASM_OPTS% CFLAGS="%COMMON_CC_OPTS% %CC_OPTS% %LOC%"
+[else]
+set BUILD_OPTS=%ASM_OPTS%
+[end]
+@echo nmake /f win32\Makefile.msc %BUILD_OPTS% STATICLIB=%STATICLIB% %target%
+nmake /nologo /f win32\Makefile.msc %BUILD_OPTS% STATICLIB=%STATICLIB% %target%
+if not errorlevel 0 goto err
+goto end
+
+@rem **************************************************************************
+:pIerr
+echo error: First parameter should be "release" or "debug"
+goto err
+
+@rem **************************************************************************
+:pIIerr
+echo error: Second parameter should be "Win32" or "X64"
+goto err
+
+
+@rem **************************************************************************
+:pIIIerr
+echo error: Third parameter should be "rebuild" or empty
+goto err
+
+@rem **************************************************************************
+:err
+set exitcode=1
+:end
+exit %exitcode%
diff --git a/build/generator/templates/makefile.ezt b/build/generator/templates/makefile.ezt
new file mode 100644
index 0000000..a76db9c
--- /dev/null
+++ b/build/generator/templates/makefile.ezt
@@ -0,0 +1,158 @@
+[define COPYRIGHT]
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+[end]# DO NOT EDIT -- AUTOMATICALLY GENERATED
+
+########################################
+# Section 1: Global make variables
+########################################
+[for modules]
+[modules.name]_DEPS =[for modules.deps] [modules.deps][end]
+[modules.name]_LINK =[for modules.link] [modules.link][end]
+[end]
+BUILD_DIRS =[for build_dirs] [build_dirs][end]
+
+BDB_TEST_DEPS =[for bdb_test_deps] [bdb_test_deps][end]
+
+BDB_TEST_PROGRAMS =[for bdb_test_progs] [bdb_test_progs][end]
+
+TEST_DEPS =[for test_deps] [test_deps][end]
+
+TEST_PROGRAMS =[for test_progs] [test_progs][end]
+
+MANPAGES =[for manpages] [manpages][end]
+
+CLEAN_FILES =[for cfiles] [cfiles][end]
+EXTRACLEAN_FILES =[for sql] [sql.header][end]
+
+SWIG_INCLUDES = -I$(abs_builddir)/subversion \
+ -I$(abs_srcdir)/subversion/include \
+ -I$(abs_srcdir)/subversion/bindings/swig \
+ -I$(abs_srcdir)/subversion/bindings/swig/include \
+ -I$(abs_srcdir)/subversion/bindings/swig/proxy \
+ -I$(abs_builddir)/subversion/bindings/swig/proxy \
+ $(SVN_APR_INCLUDES) $(SVN_APRUTIL_INCLUDES)
+[if-any release_mode]
+RELEASE_MODE = 1
+[end]
+
+########################################
+# Section 2: SWIG headers (wrappers and external runtimes)
+########################################
+[if-any release_mode]
+[else]
+[swig_rules][end]
+########################################
+# Section 3: SWIG autogen rules
+########################################
+[for swig_langs]
+autogen-swig-[swig_langs.short]:[for swig_langs.deps] [swig_langs.deps][end]
+autogen-swig: autogen-swig-[swig_langs.short]
+[end]
+
+
+########################################
+# Section 4: Rules to build SWIG .c files from .i files
+########################################
+[for swig_c]
+[swig_c.c_file]: .swig_checked [for swig_c.deps] [swig_c.deps][end]
+ $(SWIG) $(SWIG_INCLUDES) [swig_c.opts] -o $@ $(top_srcdir)/[swig_c.source][end]
+
+# This needs to be here, rather than in Makefile.in, else
+# './autogen.sh --release' doesn't find it.
+.swig_checked:
+ @if [ "$(SWIG)" = "none" ]; then \
+ echo "SWIG disabled at configure time" >&2; \
+ exit 1; \
+ fi
+ @touch .swig_checked
+
+
+########################################
+# Section 5: Individual target build rules
+########################################
+[for target]
+[target.varname]_PATH = [target.path][if-any target.install]
+install-[target.install]: [target.install_deps][end]
+[is target.type "java"][target.varname]_HEADERS = [for target.headers][if-index target.headers first][else] [end][target.headers][end]
+[target.varname]_OBJECTS = [for target.objects][if-index target.objects first][else] [end][target.objects][end]
+[target.varname]_DEPS = $([target.varname]_HEADERS) $([target.varname]_OBJECTS) [target.add_deps] [for target.deps][if-index target.deps first][else] [end][target.deps][end]
+[target.name]: $([target.varname]_DEPS)
+[if-any target.headers][target.varname]_CLASS_FILENAMES =[for target.header_class_filenames] [target.header_class_filenames][end]
+[target.varname]_CLASSES =[for target.header_classes] [target.header_classes][end]
+$([target.varname]_HEADERS): $([target.varname]_CLASS_FILENAMES)
+ [target.link_cmd] -d [target.output_dir] -classpath [target.classes]:$([target.varname]_CLASSPATH) $([target.varname]_CLASSES)
+[end][if-any target.sources][target.varname]_SRC =[for target.sources] [target.sources][end]
+$([target.varname]_OBJECTS): $([target.varname]_SRC)
+ [target.link_cmd] -d [target.output_dir] -classpath [target.classes]:$([target.varname]_CLASSPATH) $([target.varname]_SRC)
+[if-any target.jar]
+ $(JAR) cf [target.jar_path] -C [target.classes][for target.packages] [target.packages][end][end][end]
+[else][is target.type "i18n"][target.varname]_DEPS = [target.add_deps][for target.objects] [target.objects][end][for target.deps] [target.deps][end]
+[target.name]: $([target.varname]_DEPS)
+[else][target.varname]_DEPS = [target.add_deps][for target.objects] [target.objects][end][for target.deps] [target.deps][end]
+[target.varname]_OBJECTS =[for target.objnames] [target.objnames][end]
+[target.filename]: $([target.varname]_DEPS)
+ cd [target.path] && [target.link_cmd] $([target.varname]_LDFLAGS) -o [target.basename] [target.undefined_flag] $([target.varname]_OBJECTS)[for target.libs] [target.libs][end] $(LIBS)
+[end][end][end]
+
+########################################
+# Section 6: Install-Group build targets
+########################################
+[for itargets]
+[itargets.type]: [for itargets.outputs][if-index itargets.outputs first][else] [end][itargets.outputs][end]
+[end]
+
+########################################
+# Section 7: Install-Group install targets
+########################################
+[for areas]
+[is areas.type "apache-mod"]install-mods-shared:[for areas.files] [areas.files.fullname][end][for areas.files]
+ cd [areas.files.dirname] ; $(MKDIR) "$(APACHE_LIBEXECDIR)" ; $(INSTALL_MOD_SHARED) -n [areas.files.name] [areas.files.filename][end]
+[else]install-[areas.type]: [for areas.files][if-index areas.files first][else] [end][areas.files.fullname][end] [for areas.apache_files] [areas.apache_files.fullname][end]
+ $(MKDIR) $(DESTDIR)$([areas.varname]dir)[for areas.files][is areas.type "locale"]
+ $(MKDIR) [areas.files.installdir]
+ cd [areas.files.dirname] ; $(INSTALL_[areas.uppervar]) [areas.files.filename] [areas.files.installdir]/$(PACKAGE_NAME)[areas.files.objext][else]
+ cd [areas.files.dirname] ; $(INSTALL_[areas.uppervar]) [areas.files.filename] $(DESTDIR)[areas.files.install_fname][end][end][for areas.apache_files]
+ cd [areas.apache_files.dirname] ; $(MKDIR) "$(APACHE_LIBEXECDIR)" ; $(INSTALL_MOD_SHARED) -n [areas.apache_files.name] [areas.apache_files.filename][end]
+[if-any areas.extra_install] $(INSTALL_EXTRA_[areas.uppervar])
+[end][end][end]
+
+########################################
+# Section 8: The install-include rule
+########################################
+
+install-include:[for includes] [includes.file][end]
+ $(MKDIR) $(DESTDIR)[includedir][for includes]
+ $(INSTALL_INCLUDE) [includes.src] $(DESTDIR)[includes.dst][end]
+
+########################################
+# Section 9: Shortcut targets for manual builds of specific items
+########################################
+[for isources]
+[isources.name]: [isources.filename][end]
+
+########################################
+# Section 10: Rules to build all other kinds of object-like files
+########################################
+[for deps]
+[deps.name]:[for deps.deps] [deps.deps][end][if-any deps.cmd]
+ [deps.cmd] [if-any deps.generated][else]$(canonicalized_srcdir)[end][deps.source][end]
+[end]
diff --git a/build/generator/templates/msvc_dsp.ezt b/build/generator/templates/msvc_dsp.ezt
new file mode 100644
index 0000000..f34e646
--- /dev/null
+++ b/build/generator/templates/msvc_dsp.ezt
@@ -0,0 +1,95 @@
+# Microsoft Developer Studio Project File - Name="[target.proj_name]" - Package Owner=<4>
+# Microsoft Developer Studio Generated Build File, Format Version 6.00
+# ** DO NOT EDIT **
+
+# TARGTYPE "[target_type]" [target_number]
+
+CFG=[target.proj_name] - [default_platform] [default_config]
+!MESSAGE This is not a valid makefile. To build this project using NMAKE,
+!MESSAGE use the Export Makefile command and run
+!MESSAGE
+!MESSAGE NMAKE /f "[target.proj_name]_msvc.mak".
+!MESSAGE
+!MESSAGE You can specify a configuration when running NMAKE
+!MESSAGE by defining the macro CFG on the command line. For example:
+!MESSAGE
+!MESSAGE NMAKE /f "[target.proj_name]_msvc.mak" CFG="[target.proj_name] - [default_platform] [default_config]"
+!MESSAGE
+!MESSAGE Possible choices for configuration are:
+!MESSAGE
+[for platforms][for configs]!MESSAGE "[target.proj_name] - [platforms] [configs.name]" (based on "[target_type]")
+[end][end]!MESSAGE
+
+# Begin Project
+# PROP AllowPerConfigDependencies 0
+# PROP Scc_ProjName ""
+# PROP Scc_LocalPath ""
+CPP=cl.exe
+RSC=rc.exe
+[for platforms][for configs]
+![if-index platforms first][if-index configs first][else]ELSE[end][else]ELSE[end]IF "$(CFG)" == "[target.proj_name] - [platforms] [configs.name]"
+
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries [is configs.name "Debug"]1[else]0[end]
+[if-any is_external]# PROP Output_Dir "[rootpath]\[target.output_dir]\[configs.name]"
+# PROP Intermediate_Dir "[rootpath]\[target.intermediate_dir]\[configs.name]"
+# PROP Cmd_Line "cmd /c [target.cmd] [configs.lower]"
+# PROP Rebuild_Opt "rebuild"
+# PROP Target_File "[rootpath]\[target.output_dir]\[is configs.name "Debug"][target.debug][else][target.release][end]"
+# PROP Target_Dir "[rootpath]\[target.output_dir]"
+[else]# PROP Output_Dir "[rootpath]\[configs.name]\[target.output_dir]"
+# PROP Intermediate_Dir "[rootpath]\[configs.name]\[target.intermediate_dir]"
+# PROP Target_Dir ""
+[if-any is_utility][else]LIB32=link.exe -lib
+# ADD LIB32 /out:"[rootpath]\[configs.name]\[target.output_dir]\[target.output_name]"
+# ADD CPP /nologo /W3 /FD /Fd"[rootpath]\[configs.name]\[target.output_dir]\[target.output_pdb]" /c [is configs.name "Debug"]/MDd /Gm /Gi /GX /ZI /Od /GZ[else]/MD /GX /O2 /Zi[end][for configs.defines] /D "[configs.defines]"[end][if-any instrument_apr_pools] /D "APR_POOL_DEBUG=[instrument_apr_pools]"[end][for includes] /I "[includes]"[end]
+# ADD RSC /l [if-any is_exe]0x409[else]0x424[end][is configs.name "Debug"] /d "_DEBUG"[end][for includes] /I "[includes]"[end]
+BSC32=bscmake.exe
+LINK32=link.exe
+[if-any is_exe is_dll]# ADD LINK32 /nologo[if-any is_exe] /subsystem:console[end][if-any is_dll] /dll[end] /debug /machine:IX86[for configs.libs] [configs.libs][end][for configs.libdirs] /libpath:"[configs.libdirs]"[end] /out:"[rootpath]\[configs.name]\[target.output_dir]\[target.output_name]"[if-any instrument_purify_quantify] /fixed:no[end]
+[end][end][end][end][end]
+!ENDIF
+
+# Begin Target
+
+[for platforms][for configs]# Name "[target.proj_name] - [platforms] [configs.name]"
+[end][end][is target_number "0x0102"]# Begin Source File
+
+SOURCE="..\empty.c"
+# End Source File
+[end][for sources]# Begin Source File
+
+SOURCE=[sources.path][if-any sources.reldir]
+[for platforms][for configs]
+![if-index platforms first][if-index configs first][else]ELSE[end][else]ELSE[end]IF "$(CFG)" == "[target.proj_name] - [platforms] [configs.name]"
+
+# PROP Intermediate_Dir "[rootpath]\[configs.name]\[target.intermediate_dir]\[sources.reldir]"
+[end][end]
+!ENDIF
+[end][for sources.user_deps][if-index sources.user_deps first]
+USERDEP__=[else] [end]"[sources.user_deps]"[end][if-any sources.custom_build]
+[for platforms][for configs]
+![if-index platforms first][if-index configs first][else]ELSE[end][else]ELSE[end]IF "$(CFG)" == "[target.proj_name] - [platforms] [configs.name]"
+
+# Begin Custom Build
+
+InputPath=[sources.path]
+
+[sources.custom_target] : $(SOURCE) "$(INTDIR)" "$(OUTDIR)"
+[if-any sources.custom_build] [sources.custom_build]
+[end]
+# End Custom Build
+[end][end]
+!ENDIF
+[end]
+# End Source File
+[end]
+[if-any target.desc]
+# Begin Source File
+
+SOURCE="[rootpath]\build\win32\svn.rc"
+# ADD RSC /d SVN_FILE_NAME="[target.output_name]" /d SVN_FILE_DESCRIPTION="[target.desc]"
+# End Source File
+[end]
+# End Target
+# End Project
diff --git a/build/generator/templates/msvc_dsw.ezt b/build/generator/templates/msvc_dsw.ezt
new file mode 100644
index 0000000..cd8a281
--- /dev/null
+++ b/build/generator/templates/msvc_dsw.ezt
@@ -0,0 +1,32 @@
+Microsoft Developer Studio Workspace File, Format Version 6.00
+# WARNING: DO NOT EDIT OR DELETE THIS WORKSPACE FILE!
+
+###############################################################################
+
+[for targets]Project: "[targets.name]"=[targets.dsp] - Package Owner=<4>
+
+Package=<5>
+{{{
+}}}
+
+Package=<4>
+{{{
+[for targets.depends] Begin Project Dependency
+ Project_Dep_Name [targets.depends]
+ End Project Dependency
+[end]}}}
+
+###############################################################################
+
+[end]Global:
+
+Package=<5>
+{{{
+}}}
+
+Package=<3>
+{{{
+}}}
+
+###############################################################################
+
diff --git a/build/generator/templates/neon.dsp.ezt b/build/generator/templates/neon.dsp.ezt
new file mode 100644
index 0000000..111aab9
--- /dev/null
+++ b/build/generator/templates/neon.dsp.ezt
@@ -0,0 +1,96 @@
+# Microsoft Developer Studio Project File - Name="neon" - Package Owner=<4>
+# Microsoft Developer Studio Generated Build File, Format Version 6.00
+# ** DO NOT EDIT **
+
+# TARGTYPE "Win32 (x86) External Target" 0x0106
+
+CFG=neon - Win32 Debug
+!MESSAGE This is not a valid makefile. To build this project using NMAKE,
+!MESSAGE use the Export Makefile command and run
+!MESSAGE
+!MESSAGE NMAKE /f "neon.mak".
+!MESSAGE
+!MESSAGE You can specify a configuration when running NMAKE
+!MESSAGE by defining the macro CFG on the command line. For example:
+!MESSAGE
+!MESSAGE NMAKE /f "neon.mak" CFG="neon - Win32 Debug"
+!MESSAGE
+!MESSAGE Possible choices for configuration are:
+!MESSAGE
+!MESSAGE "neon - Win32 Release" (based on "Win32 (x86) External Target")
+!MESSAGE "neon - Win32 Debug" (based on "Win32 (x86) External Target")
+!MESSAGE
+
+# Begin Project
+# PROP AllowPerConfigDependencies 0
+# PROP Scc_ProjName ""
+# PROP Scc_LocalPath ""
+
+!IF "$(CFG)" == "neon - Win32 Release"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 0
+# PROP BASE Output_Dir "Release"
+# PROP BASE Intermediate_Dir "Release"
+# PROP BASE Target_File "libneon.lib"
+# PROP BASE Bsc_Name ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 0
+# PROP Output_Dir "Release"
+# PROP Intermediate_Dir "Release"
+# PROP Cmd_Line "nmake /nologo /f neon.mak ALL EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+# PROP Rebuild_Opt "/a"
+# PROP Target_File "libneon.lib"
+# PROP Bsc_Name ""
+
+!ELSEIF "$(CFG)" == "neon - Win32 Debug"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 1
+# PROP BASE Output_Dir "Debug"
+# PROP BASE Intermediate_Dir "Debug"
+# PROP BASE Rebuild_Opt "rebuild"
+# PROP BASE Target_File "libneonD.lib"
+# PROP BASE Bsc_Name ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 1
+# PROP Output_Dir "Debug"
+# PROP Intermediate_Dir "Debug"
+# PROP Cmd_Line "nmake /nologo /f neon.mak ALL DEBUG_BUILD=Aye EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path] ZLIB_LIBS=[zlib_path]\zlibstatD.lib[end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+# PROP Rebuild_Opt "/a"
+# PROP Target_File "libneonD.lib"
+# PROP Bsc_Name ""
+
+!ENDIF
+
+# Begin Target
+
+# Name "neon - Win32 Release"
+# Name "neon - Win32 Debug"
+
+!IF "$(CFG)" == "neon - Win32 Release"
+
+!ELSEIF "$(CFG)" == "neon - Win32 Debug"
+
+!ENDIF
+
+# Begin Group "Source Files"
+
+# PROP Default_Filter "cpp;c;cxx;rc;def;r;odl;idl;hpj;bat"
+[for neon_sources]# Begin Source File
+
+SOURCE="[neon_sources]"
+# End Source File
+[end]
+# End Group
+# Begin Group "Header Files"
+
+# PROP Default_Filter "h;hpp;hxx;hm;inl"
+[for neon_headers]# Begin Source File
+
+SOURCE="[neon_headers]"
+# End Source File
+[end]
+# End Group
+# End Target
+# End Project
diff --git a/build/generator/templates/neon.vcproj.ezt b/build/generator/templates/neon.vcproj.ezt
new file mode 100644
index 0000000..af867c4
--- /dev/null
+++ b/build/generator/templates/neon.vcproj.ezt
@@ -0,0 +1,85 @@
+[define COPYRIGHT]
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+[end]<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="[version]"
+ Name="neon"
+ ProjectGUID="[project_guid]"
+ Keyword="MakeFileProj">
+ <Platforms>
+[for platforms] <Platform
+ Name="[platforms]"/>
+[end] </Platforms>
+ <Configurations>
+[for platforms] <Configuration
+ Name="Debug|[platforms]"
+ OutputDirectory="Debug"
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="Debug"
+ ConfigurationType="0"
+ ManagedExtensions="1"
+ UseOfMFC="0"
+ ATLMinimizesCRunTimeLibraryUsage="FALSE">
+ <Tool
+ Name="VCNMakeTool"
+ BuildCommandLine="nmake /nologo /f neon.mak ALL DEBUG_BUILD=Aye EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path] ZLIB_LIBS=[zlib_path]\zlibstatD.lib[end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ ReBuildCommandLine="nmake /nologo /f neon.mak CLEAN ALL DEBUG_BUILD=Aye EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path] ZLIB_LIBS=[zlib_path]\zlibstatD.lib[end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ CleanCommandLine="nmake /nologo /f neon.mak CLEAN DEBUG_BUILD=Aye EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path] ZLIB_LIBS=[zlib_path]\zlibstatD.lib[end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ Output="libneonD.lib"/>
+ </Configuration>
+ <Configuration
+ Name="Release|[platforms]"
+ OutputDirectory="Release"
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="Release"
+ ConfigurationType="0"
+ ManagedExtensions="1"
+ UseOfMFC="0"
+ ATLMinimizesCRunTimeLibraryUsage="FALSE">
+ <Tool
+ Name="VCNMakeTool"
+ BuildCommandLine="nmake /nologo /f neon.mak ALL EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ ReBuildCommandLine="nmake /nologo /f neon.mak CLEAN ALL EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ CleanCommandLine="nmake /nologo /f neon.mak CLEAN EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ Output="libneon.lib"/>
+ </Configuration>
+[end] </Configurations>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cxx;rc;def;r;odl;idl;hpj;bat">
+ [for neon_sources]<File
+ RelativePath="[neon_sources]">
+ </File>
+[end]
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl">
+ [for neon_headers]<File
+ RelativePath="[neon_headers]">
+ </File>
+[end]
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/build/generator/templates/neon.vcxproj.ezt b/build/generator/templates/neon.vcxproj.ezt
new file mode 100644
index 0000000..e552624
--- /dev/null
+++ b/build/generator/templates/neon.vcxproj.ezt
@@ -0,0 +1,63 @@
+[#
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+]<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+[for configs][for platforms] <ProjectConfiguration Include="[configs]|[platforms]">
+ <Configuration>[configs]</Configuration>
+ <Platform>[platforms]</Platform>
+ </ProjectConfiguration>
+[end][end] </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>[project_guid]</ProjectGuid>
+ <Keyword>MakeFileProj</Keyword>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+[for platforms][for configs] <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="Configuration">
+ <ConfigurationType>Makefile</ConfigurationType>
+ <UseDebugLibraries>[is configs "Debug"]true[else]false[end]</UseDebugLibraries>
+ </PropertyGroup>
+[end][end] <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+[for platforms][for configs] <ImportGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" />
+ </ImportGroup>
+[end][end] <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+[for configs][for platforms] <OutDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">.\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">.\</IntDir>
+ <NMakeBuildCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">"$(VCInstallDir)bin\nmake.exe" /nologo /f neon.mak ALL [is configs "Debug"]DEBUG_BUILD=1 [end]EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path] ZLIB_LIBS=[zlib_path]\zlibstat[is configs "Debug"]D[end].lib[end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]</NMakeBuildCommandLine>
+ <NMakeReBuildCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">"$(VCInstallDir)bin\nmake.exe" /nologo /f neon.mak CLEAN ALL [is configs "Debug"]DEBUG_BUILD=1 [end]EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path] ZLIB_LIBS=[zlib_path]\zlibstat[is configs "Debug"]D[end].lib[end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]</NMakeReBuildCommandLine>
+ <NMakeCleanCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">"$(VCInstallDir)bin\nmake.exe" /nologo /f neon.mak CLEAN [is configs "Debug"]DEBUG_BUILD=1 [end]EXPAT_INC=[expat_path] [if-any zlib_path]ZLIB_SRC=[zlib_path] ZLIB_LIBS=[zlib_path]\zlibstat[is configs "Debug"]D[end].lib[end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]</NMakeCleanCommandLine>
+ <NMakeOutput Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">[is configs "Release"]libneon.lib[else]libneonD.lib[end]</NMakeOutput>
+[end][end] </PropertyGroup>
+ <ItemDefinitionGroup>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+[for neon_sources] <ClCompile Include="[neon_sources]" />
+[end] </ItemGroup>
+ <ItemGroup>
+[for neon_headers] <ClInclude Include="[neon_headers]" />
+[end] </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project>
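
Throughout these templates, the nested [for configs][for platforms] loops emit one element per Configuration|Platform pair, and [is configs "Debug"] flips the Debug-only macros on the generated NMake command line. A minimal Python sketch of that expansion, with invented values standing in for [configs], [platforms] and [expat_path] (this is not the project's generator, only an illustration):

    # Illustrative only -- not the project's generator.  Shows how the nested
    # [for configs][for platforms] loops in neon.vcxproj.ezt fan out into one
    # NMake command line per Configuration|Platform pair.
    from itertools import product

    configs = ["Debug", "Release"]        # assumed values behind [configs]
    platforms = ["Win32", "x64"]          # assumed values behind [platforms]
    expat_path = r"C:\deps\expat\lib"     # made-up stand-in for [expat_path]

    for cfg, plat in product(configs, platforms):
        debug = "DEBUG_BUILD=1 " if cfg == "Debug" else ""   # [is configs "Debug"]
        cmd = (r'"$(VCInstallDir)bin\nmake.exe" /nologo /f neon.mak ALL '
               + debug + "EXPAT_INC=" + expat_path)
        print(cfg + "|" + plat + ": " + cmd)
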
diff --git a/build/generator/templates/serf.dsp.ezt b/build/generator/templates/serf.dsp.ezt
new file mode 100644
index 0000000..0db7452
--- /dev/null
+++ b/build/generator/templates/serf.dsp.ezt
@@ -0,0 +1,96 @@
+# Microsoft Developer Studio Project File - Name="serf" - Package Owner=<4>
+# Microsoft Developer Studio Generated Build File, Format Version 6.00
+# ** DO NOT EDIT **
+
+# TARGTYPE "Win32 (x86) External Target" 0x0106
+
+CFG=serf - Win32 Debug
+!MESSAGE This is not a valid makefile. To build this project using NMAKE,
+!MESSAGE use the Export Makefile command and run
+!MESSAGE
+!MESSAGE NMAKE /f "serf.mak".
+!MESSAGE
+!MESSAGE You can specify a configuration when running NMAKE
+!MESSAGE by defining the macro CFG on the command line. For example:
+!MESSAGE
+!MESSAGE NMAKE /f "serf.mak" CFG="serf - Win32 Debug"
+!MESSAGE
+!MESSAGE Possible choices for configuration are:
+!MESSAGE
+!MESSAGE "serf - Win32 Release" (based on "Win32 (x86) External Target")
+!MESSAGE "serf - Win32 Debug" (based on "Win32 (x86) External Target")
+!MESSAGE
+
+# Begin Project
+# PROP AllowPerConfigDependencies 0
+# PROP Scc_ProjName ""
+# PROP Scc_LocalPath ""
+
+!IF "$(CFG)" == "serf - Win32 Release"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 0
+# PROP BASE Output_Dir "Release"
+# PROP BASE Intermediate_Dir "Release"
+# PROP BASE Target_File "Release/[serf_lib]"
+# PROP BASE Bsc_Name ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 0
+# PROP Output_Dir "Release"
+# PROP Intermediate_Dir "Release"
+# PROP Cmd_Line "nmake /s /nologo /f serf.mak CLEAN ALL APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+# PROP Rebuild_Opt "/a"
+# PROP Target_File "Release/[serf_lib]"
+# PROP Bsc_Name ""
+
+!ELSEIF "$(CFG)" == "serf - Win32 Debug"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 1
+# PROP BASE Output_Dir "Debug"
+# PROP BASE Intermediate_Dir "Debug"
+# PROP BASE Rebuild_Opt "/a"
+# PROP BASE Target_File "Debug/[serf_lib]"
+# PROP BASE Bsc_Name ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 1
+# PROP Output_Dir "Debug"
+# PROP Intermediate_Dir "Debug"
+# PROP Cmd_Line "nmake /s /nologo /f serf.mak CLEAN ALL DEBUG_BUILD=1 APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path] ZLIB_LIBS=[zlib_path]\zlibstatD.lib[end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+# PROP Rebuild_Opt "/a"
+# PROP Target_File "Debug/[serf_lib]"
+# PROP Bsc_Name ""
+
+!ENDIF
+
+# Begin Target
+
+# Name "serf - Win32 Release"
+# Name "serf - Win32 Debug"
+
+!IF "$(CFG)" == "serf - Win32 Release"
+
+!ELSEIF "$(CFG)" == "serf - Win32 Debug"
+
+!ENDIF
+
+# Begin Group "Source Files"
+
+# PROP Default_Filter "cpp;c;cxx;rc;def;r;odl;idl;hpj;bat"
+[for serf_sources]# Begin Source File
+
+SOURCE="[serf_sources]"
+# End Source File
+[end]
+# End Group
+# Begin Group "Header Files"
+
+# PROP Default_Filter "h;hpp;hxx;hm;inl"
+[for serf_headers]# Begin Source File
+
+SOURCE="[serf_headers]"
+# End Source File
+[end]
+# End Group
+# End Target
+# End Project
diff --git a/build/generator/templates/serf.vcproj.ezt b/build/generator/templates/serf.vcproj.ezt
new file mode 100644
index 0000000..73a9788
--- /dev/null
+++ b/build/generator/templates/serf.vcproj.ezt
@@ -0,0 +1,85 @@
+[define COPYRIGHT]
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+[end]<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="[version]"
+ Name="serf"
+ ProjectGUID="[project_guid]"
+ Keyword="MakeFileProj">
+ <Platforms>
+[for platforms] <Platform
+ Name="[platforms]"/>
+[end] </Platforms>
+ <Configurations>
+[for platforms] <Configuration
+ Name="Debug|[platforms]"
+ OutputDirectory="Debug"
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="Debug"
+ ConfigurationType="0"
+ ManagedExtensions="1"
+ UseOfMFC="0"
+ ATLMinimizesCRunTimeLibraryUsage="FALSE">
+ <Tool
+ Name="VCNMakeTool"
+ BuildCommandLine="nmake /s /nologo /f serf.mak ALL DEBUG_BUILD=1 APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ ReBuildCommandLine="nmake /s /nologo /f serf.mak CLEAN ALL DEBUG_BUILD=1 APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ CleanCommandLine="nmake /s /nologo /f serf.mak CLEAN DEBUG_BUILD=1 APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ Output="$(OutDir)\[serf_lib]"/>
+ </Configuration>
+ <Configuration
+ Name="Release|[platforms]"
+ OutputDirectory="Release"
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="Release"
+ ConfigurationType="0"
+ ManagedExtensions="1"
+ UseOfMFC="0"
+ ATLMinimizesCRunTimeLibraryUsage="FALSE">
+ <Tool
+ Name="VCNMakeTool"
+ BuildCommandLine="nmake /s /nologo /f serf.mak ALL APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ ReBuildCommandLine="nmake /s /nologo /f serf.mak CLEAN ALL APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ CleanCommandLine="nmake /s /nologo /f serf.mak CLEAN APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]"
+ Output="$(OutDir)\[serf_lib]"/>
+ </Configuration>
+[end] </Configurations>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cxx;rc;def;r;odl;idl;hpj;bat">
+ [for serf_sources]<File
+ RelativePath="[serf_sources]">
+ </File>
+ [end]
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl">
+ [for serf_headers]<File
+ RelativePath="[serf_headers]">
+ </File>
+ [end]
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/build/generator/templates/serf.vcxproj.ezt b/build/generator/templates/serf.vcxproj.ezt
new file mode 100644
index 0000000..f9f8e9a
--- /dev/null
+++ b/build/generator/templates/serf.vcxproj.ezt
@@ -0,0 +1,64 @@
+[#
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+]<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+[for configs][for platforms] <ProjectConfiguration Include="[configs]|[platforms]">
+ <Configuration>[configs]</Configuration>
+ <Platform>[platforms]</Platform>
+ </ProjectConfiguration>
+[end][end] </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectName>serf</ProjectName>
+ <ProjectGuid>[project_guid]</ProjectGuid>
+ <Keyword>MakeFileProj</Keyword>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+[for platforms][for configs] <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="Configuration">
+ <ConfigurationType>Makefile</ConfigurationType>
+ <UseDebugLibraries>[is configs "Debug"]true[else]false[end]</UseDebugLibraries>
+ </PropertyGroup>
+[end][end] <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+[for platforms][for configs] <ImportGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" />
+ </ImportGroup>
+[end][end] <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+[for configs][for platforms] <OutDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">[configs]</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">[configs]</IntDir>
+ <NMakeBuildCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">"$(VCInstallDir)bin\nmake.exe" /s /nologo /f serf.mak ALL [is configs "Debug"]DEBUG_BUILD=1 [end]APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]</NMakeBuildCommandLine>
+ <NMakeReBuildCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">"$(VCInstallDir)bin\nmake.exe" /s /nologo /f serf.mak CLEAN ALL [is configs "Debug"]DEBUG_BUILD=1 [end]APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]</NMakeReBuildCommandLine>
+ <NMakeCleanCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">"$(VCInstallDir)bin\nmake.exe" /s /nologo /f serf.mak CLEAN [is configs "Debug"]DEBUG_BUILD=1 [end]APR_SRC=[apr_path] APRUTIL_SRC=[apr_util_path] [if-any zlib_path]ZLIB_SRC=[zlib_path][end] [if-any openssl_path]OPENSSL_SRC=[openssl_path][end]</NMakeCleanCommandLine>
+ <NMakeOutput Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">$(OutDir)\[serf_lib]</NMakeOutput>
+[end][end] </PropertyGroup>
+ <ItemDefinitionGroup>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+[for serf_sources] <ClCompile Include="[serf_sources]" />
+[end] </ItemGroup>
+ <ItemGroup>
+[for serf_headers] <ClInclude Include="[serf_headers]" />
+[end] </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project>
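
The serf templates drive serf.mak the same way, but lean on the [if-any ...] guards: the APR and APR-util source paths are always passed, ZLIB_SRC and OPENSSL_SRC appear only when the corresponding paths were configured, and DEBUG_BUILD=1 only in the Debug configuration. A hedged sketch of that argument assembly (placeholder paths, not the real generator):

    # Illustrative only: how the [if-any ...] guards in the serf templates
    # decide which serf.mak macros get passed to nmake.  The paths are made-up
    # placeholders standing in for [apr_path], [zlib_path], [openssl_path].
    def serf_nmake_args(apr_path, apr_util_path, zlib_path=None,
                        openssl_path=None, debug=False):
        args = ["/s", "/nologo", "/f", "serf.mak", "ALL"]
        if debug:                                   # [is configs "Debug"]
            args.append("DEBUG_BUILD=1")
        args += [f"APR_SRC={apr_path}", f"APRUTIL_SRC={apr_util_path}"]
        if zlib_path:                               # [if-any zlib_path]
            args.append(f"ZLIB_SRC={zlib_path}")
        if openssl_path:                            # [if-any openssl_path]
            args.append(f"OPENSSL_SRC={openssl_path}")
        return args

    print(" ".join(serf_nmake_args(r"..\apr", r"..\apr-util",
                                   zlib_path=r"..\zlib", debug=True)))
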
diff --git a/build/generator/templates/svn_config.dsp.ezt b/build/generator/templates/svn_config.dsp.ezt
new file mode 100644
index 0000000..a45eb37
--- /dev/null
+++ b/build/generator/templates/svn_config.dsp.ezt
@@ -0,0 +1,97 @@
+# Microsoft Developer Studio Project File - Name="__CONFIG__" - Package Owner=<4>
+# Microsoft Developer Studio Generated Build File, Format Version 6.00
+# ** DO NOT EDIT **
+
+# TARGTYPE "Win32 (x86) Generic Project" 0x010a
+
+CFG=__CONFIG__ - Win32 Debug
+!MESSAGE This is not a valid makefile. To build this project using NMAKE,
+!MESSAGE use the Export Makefile command and run
+!MESSAGE
+!MESSAGE NMAKE /f "svn_config.mak".
+!MESSAGE
+!MESSAGE You can specify a configuration when running NMAKE
+!MESSAGE by defining the macro CFG on the command line. For example:
+!MESSAGE
+!MESSAGE NMAKE /f "svn_config.mak" CFG="__CONFIG__ - Win32 Debug"
+!MESSAGE
+!MESSAGE Possible choices for configuration are:
+!MESSAGE
+!MESSAGE "__CONFIG__ - Win32 Release" (based on "Win32 (x86) Generic Project")
+!MESSAGE "__CONFIG__ - Win32 Debug" (based on "Win32 (x86) Generic Project")
+!MESSAGE
+
+# Begin Project
+# PROP AllowPerConfigDependencies 0
+# PROP Scc_ProjName ""
+# PROP Scc_LocalPath ""
+MTL=midl.exe
+
+!IF "$(CFG)" == "__CONFIG__ - Win32 Release"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 0
+# PROP BASE Output_Dir "Release"
+# PROP BASE Intermediate_Dir "Release"
+# PROP BASE Target_Dir ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 0
+# PROP Output_Dir ""
+# PROP Intermediate_Dir ""
+# PROP Target_Dir ""
+
+!ELSEIF "$(CFG)" == "__CONFIG__ - Win32 Debug"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 1
+# PROP BASE Output_Dir "Debug"
+# PROP BASE Intermediate_Dir "Debug"
+# PROP BASE Target_Dir ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 1
+# PROP Output_Dir ""
+# PROP Intermediate_Dir ""
+# PROP Target_Dir ""
+
+!ENDIF
+
+# Begin Target
+
+# Name "__CONFIG__ - Win32 Release"
+# Name "__CONFIG__ - Win32 Debug"
+# Begin Source File
+
+SOURCE=..\..\..\subversion\svn_private_config.h
+# PROP Exclude_From_Build 1
+# End Source File
+# Begin Source File
+
+SOURCE=..\..\..\subversion\svn_private_config.hw
+
+!IF "$(CFG)" == "__CONFIG__ - Win32 Release"
+
+# PROP Ignore_Default_Tool 1
+# Begin Custom Build - Creating svn_private_config.h from svn_private_config.hw.
+InputPath=..\..\..\subversion\svn_private_config.hw
+
+"..\..\..\subversion\svn_private_config.h" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)"
+ copy ..\..\..\subversion\svn_private_config.hw ..\..\..\subversion\svn_private_config.h > nul
+
+# End Custom Build
+
+!ELSEIF "$(CFG)" == "__CONFIG__ - Win32 Debug"
+
+# PROP Ignore_Default_Tool 1
+# Begin Custom Build - Creating svn_private_config.h from svn_private_config.hw.
+InputPath=..\..\..\subversion\svn_private_config.hw
+
+"..\..\..\subversion\svn_private_config.h" : $(SOURCE) "$(INTDIR)" "$(OUTDIR)"
+	copy ..\..\..\subversion\svn_private_config.hw ..\..\..\subversion\svn_private_config.h > nul
+
+# End Custom Build
+
+!ENDIF
+
+# End Source File
+# End Target
+# End Project
diff --git a/build/generator/templates/svn_config.vcproj.ezt b/build/generator/templates/svn_config.vcproj.ezt
new file mode 100644
index 0000000..dd9612e
--- /dev/null
+++ b/build/generator/templates/svn_config.vcproj.ezt
@@ -0,0 +1,94 @@
+[define COPYRIGHT]
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+[end]<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="[version]"
+ Name="__CONFIG__"
+ ProjectGUID="[project_guid]"
+ Keyword="MakeFileProj">
+ <Platforms>
+[for platforms] <Platform
+ Name="[platforms]"/>
+[end] </Platforms>
+ <Configurations>
+[for platforms][for configs] <Configuration
+ Name="[configs]|[platforms]"
+ OutputDirectory="."
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="..\..\$(ConfigurationName)"
+ ConfigurationType="10"
+ UseOfMFC="0"
+ DeleteExtensionsOnClean="*.obj;*.ilk;*.pdb;*.tlb;*.tli;*.tlh;*.tmp;*.rsp;$(TargetPath)"
+ ATLMinimizesCRunTimeLibraryUsage="FALSE">
+ <Tool
+ Name="VCCustomBuildTool"/>
+ <Tool
+ Name="VCMIDLTool"
+ TypeLibraryName="./svn_config.tlb"
+ HeaderFileName=""/>
+ <Tool
+ Name="VCPostBuildEventTool"/>
+ <Tool
+ Name="VCPreBuildEventTool"/>
+ </Configuration>
+[end][end]
+ </Configurations>
+ <Files>
+ <File
+ RelativePath="..\..\subversion\svn_private_config.h">
+[for platforms][for configs] <FileConfiguration
+ Name="[configs]|[platforms]"
+ ExcludedFromBuild="TRUE">
+ <Tool
+ Name="VCCustomBuildTool"/>
+ </FileConfiguration>
+[end][end] </File>
+ <File
+ RelativePath="..\..\subversion\svn_private_config.hw">
+[for platforms][for configs] <FileConfiguration
+ Name="[configs]|[platforms]">
+ <Tool
+ Name="VCCustomBuildTool"
+ Description="Creating svn_private_config.h from svn_private_config.hw."
+ CommandLine="copy ..\..\..\subversion\svn_private_config.hw ..\..\..\subversion\svn_private_config.h &gt; nul
+"
+ Outputs="..\..\subversion\svn_private_config.h"/>
+ </FileConfiguration>
+[end][end] </File>
+[for sql] <File
+ RelativePath="../../../[sql.source]">
+[for platforms][for configs] <FileConfiguration
+ Name="[configs]|[platforms]">
+ <Tool
+ Name="VCCustomBuildTool"
+ Description="Generating [sql.header]"
+ CommandLine="&quot;[sql.svn_python]&quot; &quot;$(SolutionDir)\build\transform_sql.py&quot; &quot;$(InputPath)&quot; &quot;$(SolutionDir)\[sql.header]&quot;"
+ AdditionalDependencies="[for sql.dependencies]$(SolutionDir)\[sql.dependencies];[end]"
+ Outputs="$(SolutionDir)\[sql.header]"
+ />
+ </FileConfiguration>
+[end][end] </File>
+[end]
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
+
diff --git a/build/generator/templates/svn_config.vcxproj.ezt b/build/generator/templates/svn_config.vcxproj.ezt
new file mode 100644
index 0000000..1522623
--- /dev/null
+++ b/build/generator/templates/svn_config.vcxproj.ezt
@@ -0,0 +1,72 @@
+[#
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+]<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+[for configs][for platforms] <ProjectConfiguration Include="[configs]|[platforms]">
+ <Configuration>[configs]</Configuration>
+ <Platform>[platforms]</Platform>
+ </ProjectConfiguration>
+[end][end] </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>[project_guid]</ProjectGuid>
+ <Keyword>MakeFileProj</Keyword>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+[for platforms][for configs] <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="Configuration">
+ <ConfigurationType>Utility</ConfigurationType>
+ <UseDebugLibraries>[is configs "Debug"]true[else]false[end]</UseDebugLibraries>
+ </PropertyGroup>
+[end][end] <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+[for platforms][for configs] <ImportGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" />
+ </ImportGroup>
+[end][end] <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+[for configs][for platforms] <OutDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">.\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">.\</IntDir>
+[end][end] </PropertyGroup>
+ <ItemGroup>
+ <CustomBuild Include="$(SolutionDir)\subversion\svn_private_config.h">
+[for configs][for platforms] <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">true</ExcludedFromBuild>
+[end][end] </CustomBuild>
+ </ItemGroup>
+ <ItemGroup>
+ <CustomBuild Include="$(SolutionDir)\subversion\svn_private_config.hw">
+ <FileType>Document</FileType>
+[for configs][for platforms] <Message Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">Creating svn_private_config.h from svn_private_config.hw.</Message>
+ <Command Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">copy $(SolutionDir)\subversion\svn_private_config.hw $(SolutionDir)\subversion\svn_private_config.h &gt; nul:</Command>
+ <Outputs Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">$(SolutionDir)\subversion\svn_private_config.h;%(Outputs)</Outputs>
+[end][end] </CustomBuild>
+[for sql] <CustomBuild Include="$(SolutionDir)\[sql.source]">
+ <FileType>Document</FileType>
+[for configs][for platforms] <Message Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">Generating [sql.header].</Message>
+ <Command Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">"[sql.svn_python]" "$(SolutionDir)\build\transform_sql.py" "%(FullPath)" "$(SolutionDir)\[sql.header]"</Command>
+ <Outputs Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">$(SolutionDir)\[sql.header];%(Outputs)</Outputs>
+ <AdditionalInputs Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">[for sql.dependencies]$(SolutionDir)\[sql.dependencies];[end]%(AdditionalInputs)</AdditionalInputs>
+[end][end]
+ </CustomBuild>
+[end] </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project>
\ No newline at end of file
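
The svn_config project compiles nothing; its custom-build steps copy svn_private_config.hw to svn_private_config.h and run build/transform_sql.py once per [sql] entry to regenerate the corresponding header. Roughly, and only as an illustration with an assumed SQL source (the real file list comes from the generator data):

    # Illustrative sketch of what the svn_config custom-build steps amount to:
    # copy svn_private_config.hw to svn_private_config.h, then run
    # build/transform_sql.py once per SQL source to regenerate its header.
    import shutil, subprocess, sys
    from pathlib import Path

    solution_dir = Path(".")                              # assumed $(SolutionDir)
    hw = solution_dir / "subversion" / "svn_private_config.hw"
    shutil.copyfile(hw, hw.with_suffix(".h"))             # the "copy ... > nul" step

    sql = [{"source": "subversion/libsvn_wc/wc-queries.sql",  # assumed [sql.source]
            "header": "subversion/libsvn_wc/wc-queries.h"}]   # assumed [sql.header]
    for item in sql:
        subprocess.check_call([sys.executable,                # [sql.svn_python]
                               str(solution_dir / "build" / "transform_sql.py"),
                               str(solution_dir / item["source"]),
                               str(solution_dir / item["header"])])
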
diff --git a/build/generator/templates/svn_locale.dsp.ezt b/build/generator/templates/svn_locale.dsp.ezt
new file mode 100644
index 0000000..3026ee9
--- /dev/null
+++ b/build/generator/templates/svn_locale.dsp.ezt
@@ -0,0 +1,85 @@
+# Microsoft Developer Studio Project File - Name="locale" - Package Owner=<4>
+# Microsoft Developer Studio Generated Build File, Format Version 6.00
+# ** DO NOT EDIT **
+
+# TARGTYPE "Win32 (x86) External Target" 0x0106
+
+CFG=locale - Win32 Debug
+!MESSAGE This is not a valid makefile. To build this project using NMAKE,
+!MESSAGE use the Export Makefile command and run
+!MESSAGE
+!MESSAGE NMAKE /f "locale.mak".
+!MESSAGE
+!MESSAGE You can specify a configuration when running NMAKE
+!MESSAGE by defining the macro CFG on the command line. For example:
+!MESSAGE
+!MESSAGE NMAKE /f "locale.mak" CFG="locale - Win32 Debug"
+!MESSAGE
+!MESSAGE Possible choices for configuration are:
+!MESSAGE
+!MESSAGE "locale - Win32 Release" (based on "Win32 (x86) External Target")
+!MESSAGE "locale - Win32 Debug" (based on "Win32 (x86) External Target")
+!MESSAGE
+
+# Begin Project
+# PROP AllowPerConfigDependencies 0
+# PROP Scc_ProjName ""
+# PROP Scc_LocalPath ""
+
+!IF "$(CFG)" == "locale - Win32 Release"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 0
+# PROP BASE Output_Dir "..\..\locale\Release"
+# PROP BASE Intermediate_Dir "..\..\locale\Release"
+# PROP BASE Cmd_Line "build_locale.bat release"
+# PROP BASE Rebuild_Opt "rebuild"
+# PROP BASE Target_File ""
+# PROP BASE Bsc_Name ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 0
+# PROP Output_Dir "..\..\locale\Release"
+# PROP Intermediate_Dir "..\..\locale\Release"
+# PROP Cmd_Line "cmd /c build_locale.bat release"
+# PROP Rebuild_Opt "rebuild"
+# PROP Target_File ""
+# PROP Bsc_Name ""
+
+!ELSEIF "$(CFG)" == "locale - Win32 Debug"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 1
+# PROP BASE Output_Dir "..\..\locale\Debug"
+# PROP BASE Intermediate_Dir "..\..\locale\Debug"
+# PROP BASE Cmd_Line "build_locale.bat debug"
+# PROP BASE Rebuild_Opt "rebuild"
+# PROP BASE Target_File ""
+# PROP BASE Bsc_Name ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 1
+# PROP Output_Dir "..\..\locale\Debug"
+# PROP Intermediate_Dir "..\..\locale\Debug"
+# PROP Cmd_Line "cmd /c build_locale.bat debug"
+# PROP Rebuild_Opt "rebuild"
+# PROP Target_File ""
+# PROP Bsc_Name ""
+
+!ENDIF
+
+# Begin Target
+
+# Name "locale - Win32 Release"
+# Name "locale - Win32 Debug"
+
+!IF "$(CFG)" == "locale - Win32 Release"
+
+!ELSEIF "$(CFG)" == "locale - Win32 Debug"
+
+!ENDIF
+
+# Begin Group "Source Files"
+
+# PROP Default_Filter "cpp;c;cxx;rc;def;r;odl;idl;hpj;bat"
+# End Group
+# End Target
+# End Project
diff --git a/build/generator/templates/svn_locale.vcproj.ezt b/build/generator/templates/svn_locale.vcproj.ezt
new file mode 100644
index 0000000..b7caef2
--- /dev/null
+++ b/build/generator/templates/svn_locale.vcproj.ezt
@@ -0,0 +1,52 @@
+[define COPYRIGHT]
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+[end]<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="[version]"
+ Name="locale"
+ ProjectGUID="{18FA6D17-1EE7-43A2-BC2A-C2AF819CEEFB}"
+ Keyword="MakeFileProj">
+ <Platforms>
+[for platforms] <Platform
+ Name="[platforms]"/>
+[end] </Platforms>
+ <Configurations>
+[for platforms][for configs] <Configuration
+ Name="[configs]|[platforms]"
+ OutputDirectory="..\..\[configs]\mo"
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="..\..\[configs]\mo"
+ ConfigurationType="0"
+ UseOfMFC="0"
+ ATLMinimizesCRunTimeLibraryUsage="FALSE">
+ <Tool
+ Name="VCNMakeTool"
+ BuildCommandLine="cmd /c build_locale.bat [configs]"
+ ReBuildCommandLine="cmd /c build_locale.bat [configs]"
+ CleanCommandLine="cmd /c del $(OutDir)\*.mo"
+ />
+ </Configuration>
+[end][end] </Configurations>
+ <Files>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/build/generator/templates/svn_locale.vcxproj.ezt b/build/generator/templates/svn_locale.vcxproj.ezt
new file mode 100644
index 0000000..e00b349
--- /dev/null
+++ b/build/generator/templates/svn_locale.vcxproj.ezt
@@ -0,0 +1,57 @@
+[define COPYRIGHT]
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+[end]<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+[for configs][for platforms] <ProjectConfiguration Include="[configs]|[platforms]">
+ <Configuration>[configs]</Configuration>
+ <Platform>[platforms]</Platform>
+ </ProjectConfiguration>
+[end][end] </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectName>locale</ProjectName>
+ <ProjectGuid>{18FA6D17-1EE7-43A2-BC2A-C2AF819CEEFB}</ProjectGuid>
+ <Keyword>MakeFileProj</Keyword>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+[for platforms][for configs] <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="Configuration">
+ <ConfigurationType>Utility</ConfigurationType>
+ <CLRSupport>false</CLRSupport>
+ </PropertyGroup>
+[end][end] <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+[for platforms][for configs] <ImportGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" />
+ </ImportGroup>
+[end][end] <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+[for configs][for platforms] <OutDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">..\..\[configs]\mo\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">..\..\[configs]\mo\</IntDir>
+ <NMakeBuildCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">cmd /c build_locale.bat [configs]</NMakeBuildCommandLine>
+ <NMakeReBuildCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">cmd /c build_locale.bat [configs]</NMakeReBuildCommandLine>
+ <NMakeCleanCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">cmd /c del $(OutDir)*.mo</NMakeCleanCommandLine>
+[end][end] </PropertyGroup>
+ <ItemGroup>
+ </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project>
\ No newline at end of file
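
The locale project is likewise a shell wrapper: build and rebuild both invoke build_locale.bat with the configuration name, and clean simply deletes the compiled .mo catalogues from the per-configuration output directory. A small, purely illustrative sketch of the three command lines it produces:

    # Illustrative only: the locale project is a thin wrapper around
    # build_locale.bat; each configuration just changes the argument and the
    # output directory for the compiled .mo catalogues.
    def locale_commands(config):
        out_dir = rf"..\..\{config}\mo"                   # [configs] -> OutDir
        return {
            "build":   f"cmd /c build_locale.bat {config}",
            "rebuild": f"cmd /c build_locale.bat {config}",
            "clean":   rf"cmd /c del {out_dir}\*.mo",
        }

    print(locale_commands("Release"))
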
diff --git a/build/generator/templates/vcnet_sln.ezt b/build/generator/templates/vcnet_sln.ezt
new file mode 100644
index 0000000..d189a55
--- /dev/null
+++ b/build/generator/templates/vcnet_sln.ezt
@@ -0,0 +1,64 @@
+[define COPYRIGHT]
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+[end]Microsoft Visual Studio Solution File, Format Version [version]
+# Visual Studio [vs_version]
+[for targets]Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "[targets.name]", "[targets.path]", "[targets.guid]"
+[is dependency_location "solution"] ProjectSection(ProjectDependencies) = postProject
+[for targets.depends] [targets.depends.guid] = [targets.depends.guid]
+[end] EndProjectSection
+[end]EndProject
+[end]Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Libraries", "Libraries", "{F6BDBE6F-7C7B-458B-8801-0B351C85ED6E}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Dlls", "Dlls", "{2F13BEE6-AA51-4882-9015-DCE38AB8E474}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Swig", "Swig", "{9D82B7A1-8C6E-4FB1-895C-DEE6E244CB21}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Java", "Java", "{B604B17D-DC50-44D4-AD41-088A67CFB89E}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Programs", "Programs", "{01A6F906-A386-4E8D-B080-49A3373A7158}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{630535D0-22D7-4D7C-8850-CB468739AFAC}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+[for configs][for platforms] [configs.name]|[platforms] = [configs.name]|[platforms]
+[end][end] EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+[for guids][for platforms][for configs] [guids].[configs.name]|[platforms].ActiveCfg = [configs.name]|[platforms]
+[end][for configs] [guids].[configs.name]|[platforms].Build.0 = [configs.name]|[platforms]
+[end][end][end] EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ EndGlobalSection
+ GlobalSection(ExtensibilityAddIns) = postSolution
+ EndGlobalSection
+ GlobalSection(NestedProjects) = preSolution
+[for targets][is targets.group "exe"] [targets.guid] = {01A6F906-A386-4E8D-B080-49A3373A7158}
+[end][is targets.group "lib"] [targets.guid] = {F6BDBE6F-7C7B-458B-8801-0B351C85ED6E}
+[end][is targets.group "dll"] [targets.guid] = {2F13BEE6-AA51-4882-9015-DCE38AB8E474}
+[end][is targets.group "swiglib"] [targets.guid] = {9D82B7A1-8C6E-4FB1-895C-DEE6E244CB21}
+[end][is targets.group "test"] [targets.guid] = {630535D0-22D7-4D7C-8850-CB468739AFAC}
+[end][is targets.group "java"] [targets.guid] = {B604B17D-DC50-44D4-AD41-088A67CFB89E}
+[end][end] {9D82B7A1-8C6E-4FB1-895C-DEE6E244CB21} = {F6BDBE6F-7C7B-458B-8801-0B351C85ED6E}
+ {2F13BEE6-AA51-4882-9015-DCE38AB8E474} = {F6BDBE6F-7C7B-458B-8801-0B351C85ED6E}
+ {B604B17D-DC50-44D4-AD41-088A67CFB89E} = {F6BDBE6F-7C7B-458B-8801-0B351C85ED6E}
+ EndGlobalSection
+EndGlobal
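
The solution template above nests every generated project into one of the fixed solution folders (Libraries, Dlls, Swig, Java, Programs, Tests) by matching [targets.group] in the NestedProjects section. A sketch of that mapping, reusing the folder GUIDs hard-coded in the template but with invented sample targets:

    # Illustrative sketch of vcnet_sln.ezt's NestedProjects mapping: each
    # target's [targets.group] selects a solution-folder GUID.  The folder
    # GUIDs are the ones in the template; the sample targets are made up.
    FOLDER_BY_GROUP = {
        "exe":     "{01A6F906-A386-4E8D-B080-49A3373A7158}",  # Programs
        "lib":     "{F6BDBE6F-7C7B-458B-8801-0B351C85ED6E}",  # Libraries
        "dll":     "{2F13BEE6-AA51-4882-9015-DCE38AB8E474}",  # Dlls
        "swiglib": "{9D82B7A1-8C6E-4FB1-895C-DEE6E244CB21}",  # Swig
        "test":    "{630535D0-22D7-4D7C-8850-CB468739AFAC}",  # Tests
        "java":    "{B604B17D-DC50-44D4-AD41-088A67CFB89E}",  # Java
    }

    targets = [  # assumed shape of the [targets] data handed to the template
        {"name": "libsvn_subr", "guid": "{AAAA-invented}", "group": "lib"},
        {"name": "svn",         "guid": "{BBBB-invented}", "group": "exe"},
    ]
    for t in targets:
        print("\t\t" + t["guid"] + " = " + FOLDER_BY_GROUP[t["group"]])
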
diff --git a/build/generator/templates/vcnet_vc7_sln.ezt b/build/generator/templates/vcnet_vc7_sln.ezt
new file mode 100644
index 0000000..931ee2d
--- /dev/null
+++ b/build/generator/templates/vcnet_vc7_sln.ezt
@@ -0,0 +1,40 @@
+[define COPYRIGHT]
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+[end]Microsoft Visual Studio Solution File, Format Version [version]
+[for targets]Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "[targets.name]", "[targets.path]", "[targets.guid]"
+EndProject
+[end]Global
+ GlobalSection(SolutionConfiguration) = preSolution
+[for configs] ConfigName.[configs.index] = [configs.name]
+[end] EndGlobalSection
+ GlobalSection(ProjectDependencies) = postSolution
+[for targets][for targets.depends] [targets.guid].[targets.depends.index] = [targets.depends.guid]
+[end][end] EndGlobalSection
+ GlobalSection(ProjectConfiguration) = postSolution
+[for guids][for platforms][for configs] [guids].[configs.name].ActiveCfg = [configs.name]|[platforms]
+[end][for configs] [guids].[configs.name].Build.0 = [configs.name]|[platforms]
+[end][end][end] EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ EndGlobalSection
+ GlobalSection(ExtensibilityAddIns) = postSolution
+ EndGlobalSection
+EndGlobal
diff --git a/build/generator/templates/vcnet_vcproj.ezt b/build/generator/templates/vcnet_vcproj.ezt
new file mode 100644
index 0000000..31a0c9c
--- /dev/null
+++ b/build/generator/templates/vcnet_vcproj.ezt
@@ -0,0 +1,162 @@
+[#
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+]<?xml version="1.0" encoding = "Windows-1252"?>
+[format "xml"]<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="[version]"
+ Name="[target.proj_name]"
+ ProjectGUID="[project_guid]">
+ <Platforms>
+[for platforms] <Platform
+ Name="[platforms]"/>
+[end] </Platforms>
+ <Configurations>
+[for platforms][for configs] <Configuration
+ Name="[configs.name]|[platforms]"
+ OutputDirectory="..\..\..\[configs.name]\[target.output_dir]"
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="..\..\..\[configs.name]\[target.intermediate_dir]"
+ ConfigurationType="[target_type]"[is configs.name "Release"]
+ WholeProgramOptimization="FALSE"[end]>
+ <Tool
+ Name="VCCLCompilerTool"
+[is configs.name "Debug"] Optimization="0"
+ GlobalOptimizations="FALSE"
+[else] Optimization="2"
+ GlobalOptimizations="TRUE"
+ InlineFunctionExpansion="2"
+ EnableIntrinsicFunctions="TRUE"
+ FavorSizeOrSpeed="1"
+ OmitFramePointers="TRUE"
+[end] AdditionalIncludeDirectories="[for includes][includes][if-index includes last][else];[end][end]"
+ PreprocessorDefinitions="[if-any instrument_apr_pools]APR_POOL_DEBUG=[instrument_apr_pools];[end][is platforms "x64"]WIN64;[end][for configs.defines][configs.defines][if-index configs.defines last][else];[end][end];_CRT_SECURE_NO_WARNINGS"
+[is configs.name "Debug"] MinimalRebuild="TRUE"
+ RuntimeLibrary="3"
+ BasicRuntimeChecks="3"
+ BufferSecurityCheck="TRUE"
+ EnableFunctionLevelLinking="TRUE"
+[else] StringPooling="TRUE"
+ RuntimeLibrary="2"
+ BufferSecurityCheck="FALSE"
+[end] WarningLevel="4"
+ DisableSpecificWarnings="4100;4127;4204;4206;4701;4706"
+ Detect64BitPortabilityProblems="FALSE"
+ AdditionalOptions="
+ /we4002 /we4003 /we4013 /we4020 /we4022 /we4024 /we4028 /we4029 /we4030 /we4031 /we4033 /we4047 /we4089 /we4113 /we4115 /we4715"
+ DebugInformationFormat="3"
+ ProgramDataBaseFileName="$(IntDir)\[target.output_pdb]"
+ CompileAsManaged="0"
+ CompileAs="0"[if-any is_exe][is configs.name "Release"]
+ OptimizeForWindowsApplication="TRUE"[end][end]/>
+ <Tool
+ Name="VCCustomBuildTool"/>
+ <Tool
+ Name="VCLinkerTool"
+[is target_type "1"][if-any instrument_purify_quantify] AdditionalOptions="/fixed:no"[end][end]
+ AdditionalDependencies="[for configs.libs][configs.libs] [end]"
+[is target_type "4"][else] OutputFile="$(OutDir)\[target.output_name]"
+[end][is configs.name "Debug"] LinkIncremental="2"
+[else] LinkIncremental="1"
+[end] AdditionalLibraryDirectories="..\..\..\db4-win32\lib;[for configs.libdirs][configs.libdirs];[end]"
+ TargetMachine="[is platforms "Win32"]1[end][is platforms "x64"]17[end]"
+ IgnoreDefaultLibraryNames="libc.lib[is configs.name "debug"];msvcrt.lib[end]"
+[if-any def_file] ModuleDefinitionFile="[def_file]"
+[end] GenerateDebugInformation="TRUE"
+ ProgramDatabaseFile="$(OutDir)\$(TargetName).pdb"[is configs.name "Debug"]
+ OptimizeReferences="0"[else]
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"[end][is target_type "2"]
+ ImportLibrary="$(OutDir)\$(TargetName).lib"[end]
+[is target_type "1"] Subsystem="1"[end]/>
+[is target_type "4"]
+ <Tool
+ Name="VCLibrarianTool"
+ AdditionalOptions="[is platforms "win32"]/MACHINE:X86[end][is platforms "x64"]/MACHINE:X64[end]"
+ OutputFile="$(OutDir)\[target.output_name]"[is configs.name "debug"]
+ IgnoreDefaultLibraryNames="msvcrt.lib"[end]/>
+[end] <Tool
+ Name="VCMIDLTool"/>
+ <Tool
+ Name="VCPostBuildEventTool"/>
+ <Tool
+ Name="VCPreBuildEventTool"/>
+ <Tool
+ Name="VCPreLinkEventTool"/>
+ <Tool
+ Name="VCResourceCompilerTool"/>
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"/>
+ <Tool
+ Name="VCWebDeploymentTool"/>
+ </Configuration>
+[end][end] </Configurations>
+ <Files>[if-any target.desc]
+ <File
+ RelativePath="..\svn.rc">[for platforms][for configs]
+ <FileConfiguration
+ Name="[configs.name]|[platforms]">
+ <Tool
+ Name="VCResourceCompilerTool"
+ AdditionalIncludeDirectories="[for includes][includes][if-index includes last][else];[end][end]"
+ PreprocessorDefinitions="SVN_FILE_NAME=[target.output_name];SVN_FILE_DESCRIPTION=[target.desc];[is configs.name "Debug"]_DEBUG[else]NDEBUG[end]"/>
+ </FileConfiguration>[end][end]
+ </File>[end]
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cxx;rc;def;r;odl;idl;hpj;bat">
+[for sources][is sources.extension ".h"][else] <File
+ RelativePath="[sources.path]">
+[if-any sources.custom_build][for platforms][for configs]
+ <FileConfiguration
+ Name="[configs.name]|[platforms]">
+ <Tool
+ Name="VCCustomBuildTool"
+ CommandLine="[sources.custom_build]"
+[if-any sources.custom_desc] Description="[sources.custom_desc]"
+[end] AdditionalDependencies="[for sources.user_deps]&quot;[sources.user_deps]&quot;;[end]"
+ Outputs="&quot;[sources.custom_target]&quot;"/>
+ </FileConfiguration>
+[end][end][end]
+[if-any sources.reldir][for platforms][for configs]
+ <FileConfiguration
+ Name="[configs.name]|[platforms]">
+ <Tool
+ Name="VCCLCompilerTool"
+ ObjectFile="$(IntDir)/[sources.reldir]/"
+ ProgramDataBaseFileName="$(IntDir)/[sources.reldir]/[target.proj_name]-[sources.reldir].pdb"/>
+ </FileConfiguration>
+[end][end][end]
+ </File>
+[end][end]
+[is target_type "2"] <File RelativePath="..\empty.c"/>[end]
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl">
+[for sources][is sources.extension ".h"]
+ <File
+ RelativePath="[sources.path]" />
+[end][end]
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
+[end]
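
Note that the .vcproj and .vcxproj templates express the same warnings-as-errors policy in two syntaxes: the older template passes /weNNNN switches through AdditionalOptions, while the MSBuild template lists the bare numbers in TreatSpecificWarningsAsErrors. A quick sketch showing both renderings of the identical warning set:

    # Illustrative only: the same warning policy rendered the .vcproj way
    # (/weNNNN compiler switches) and the .vcxproj way (semicolon-separated
    # numbers in TreatSpecificWarningsAsErrors).
    WARNINGS_AS_ERRORS = [4002, 4003, 4013, 4020, 4022, 4024, 4028, 4029, 4030,
                          4031, 4033, 4047, 4089, 4113, 4115, 4715]

    vcproj_options = " ".join("/we%d" % w for w in WARNINGS_AS_ERRORS)
    vcxproj_element = (";".join(str(w) for w in WARNINGS_AS_ERRORS)
                       + ";%(TreatSpecificWarningsAsErrors)")
    print(vcproj_options)
    print(vcxproj_element)
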
diff --git a/build/generator/templates/vcnet_vcxproj.ezt b/build/generator/templates/vcnet_vcxproj.ezt
new file mode 100644
index 0000000..5e44d2f
--- /dev/null
+++ b/build/generator/templates/vcnet_vcxproj.ezt
@@ -0,0 +1,118 @@
+[#
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+]<?xml version="1.0" encoding="utf-8"?>
+[format "xml"]
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+[for configs][for platforms] <ProjectConfiguration Include="[configs.name]|[platforms]">
+ <Configuration>[configs.name]</Configuration>
+ <Platform>[platforms]</Platform>
+ </ProjectConfiguration>
+[end][end] </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>[project_guid]</ProjectGuid>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+[for platforms][for configs] <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'" Label="Configuration">
+ <ConfigurationType>[config_type]</ConfigurationType>
+ <UseDebugLibraries>[is configs.name "Debug"]true[else]false[end]</UseDebugLibraries>
+ </PropertyGroup>
+[end][end] <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+[for platforms][for configs] <ImportGroup Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" />
+ </ImportGroup>
+[end][end] <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+[for configs][for platforms] <OutDir Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">$(SolutionDir)[configs.name]\[target.output_dir]\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">$(SolutionDir)[configs.name]\[target.intermediate_dir]\[target.proj_name]\</IntDir>
+ <TargetName Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">[target.output_name_without_ext]</TargetName>
+[is config_type "DynamicLibrary"][is target.output_ext ".dll"][else] <TargetExt>[target.output_ext]</TargetExt>
+[end][end][end][end] </PropertyGroup>
+[for configs][for platforms] <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">
+ <ClCompile>
+[is configs.name "Debug"] <Optimization>Disabled</Optimization>
+[else] <Optimization>Full</Optimization>
+ <InlineFunctionExpansion>AnySuitable</InlineFunctionExpansion>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
+ <StringPooling>true</StringPooling>
+[end] <AdditionalIncludeDirectories>[for includes][includes];[end]%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <PreprocessorDefinitions>[if-any instrument_apr_pools]APR_POOL_DEBUG=[instrument_apr_pools];[end][is platforms "x64"]WIN64;[end][for configs.defines][configs.defines];[end]%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <WarningLevel>Level4</WarningLevel>
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+ <BrowseInformation>true</BrowseInformation>
+ <ProgramDataBaseFileName>$(OutDir)$(TargetName).pdb</ProgramDataBaseFileName>
+ <DisableSpecificWarnings>4100;4127;4204;4206;4701;4706;%(DisableSpecificWarnings)</DisableSpecificWarnings>
+ <TreatSpecificWarningsAsErrors>4002;4003;4013;4020;4022;4024;4028;4029;4030;4031;4033;4047;4089;4113;4115;4715;%(TreatSpecificWarningsAsErrors)</TreatSpecificWarningsAsErrors>
+ </ClCompile>
+[is config_type "Application"] <Link>
+ <AdditionalDependencies>[for configs.libs][configs.libs];[end]%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories>[for configs.libdirs][configs.libdirs];[end]%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <SubSystem>Console</SubSystem>
+ <EnableUAC>true</EnableUAC>
+[is configs.name "Debug"] <IgnoreSpecificDefaultLibraries>msvcrt.lib</IgnoreSpecificDefaultLibraries>
+[else] <OptimizeReferences>true</OptimizeReferences>
+[end] </Link>
+[else][is config_type "DynamicLibrary"] <Link>
+ <AdditionalDependencies>[for configs.libs][configs.libs];[end]%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories>[for configs.libdirs][configs.libdirs];[end]%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+[is configs.name "Debug"] <IgnoreSpecificDefaultLibraries>msvcrt.lib</IgnoreSpecificDefaultLibraries>
+[end][if-any def_file] <ModuleDefinitionFile>[def_file]</ModuleDefinitionFile>
+[end] </Link>
+[else][is config_type "StaticLibrary"] <Lib>
+ <TargetMachine>[is platforms "X64"]MachineX64[else]MachineX86[end]</TargetMachine>
+[is configs.name "Debug"] <IgnoreSpecificDefaultLibraries>msvcrt.lib</IgnoreSpecificDefaultLibraries>
+[end] </Lib>
+[end][end][end] </ItemDefinitionGroup>
+[end][end][if-any target.desc] <ItemGroup>
+ <ResourceCompile Include="..\svn.rc">
+[for configs][for platforms] <AdditionalIncludeDirectories Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">[for includes][includes];[end];%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <PreprocessorDefinitions Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">SVN_FILE_NAME=[target.output_name];SVN_FILE_DESCRIPTION=[target.desc];[is configs.name "Debug"]_DEBUG[else]NDEBUG[end];%(PreprocessorDefinitions)</PreprocessorDefinitions>
+[end][end] </ResourceCompile>
+ </ItemGroup>
+[end] <ItemGroup>
+[for sources][is sources.extension ".h"][else][if-any sources.custom_build] <CustomBuild Include="[sources.path]">
+ <FileType>Document</FileType>
+[for configs][for platforms] <Command Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">[sources.custom_build]</Command>
+ <AdditionalInputs Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">[for sources.user_deps][sources.user_deps];[end]%(AdditionalInputs)</AdditionalInputs>
+ <Outputs Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">[sources.custom_target];%(Outputs)</Outputs>
+[if-any sources.custom_desc]<Message Condition="'$(Configuration)|$(Platform)'=='[configs.name]|[platforms]'">[sources.custom_desc]</Message>
+[end][end][end] </CustomBuild>
+[else][is sources.extension ".c"] <ClCompile Include="[sources.path]" />
+[else][is sources.extension ".cpp"] <ClCompile Include="[sources.path]" />
+[else] <None Include="[sources.path]" />
+[end][end][end][end][end] </ItemGroup>
+ <ItemGroup>
+[for sources][is sources.extension ".h"] <ClInclude Include="[sources.path]" />
+[end][end] </ItemGroup>
+ <ItemGroup>
+[for depends] <ProjectReference Include="[depends.path]">
+ <Project>[depends.guid]</Project>
+ </ProjectReference>
+[end] </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project>
+[end]
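
vcnet_vcxproj.ezt routes each source file to an MSBuild item type with nested [is sources.extension ...] tests: headers become ClInclude, C/C++ sources become ClCompile, files carrying a custom build step become CustomBuild, and everything else becomes None. An equivalent sketch of that routing (file names invented):

    # Illustrative only: extension-based routing equivalent to the nested
    # [is sources.extension ...] / [if-any sources.custom_build] tests.
    def msbuild_item(path, custom_build=None):
        if path.endswith(".h"):       # [is sources.extension ".h"] -> ClInclude
            return "ClInclude"
        if custom_build:              # [if-any sources.custom_build]
            return "CustomBuild"
        if path.endswith((".c", ".cpp")):
            return "ClCompile"
        return "None"

    for name, rule in [("svn_client.h", None), ("client.c", None),
                       ("wc-queries.sql", "generate header")]:
        print(name, "->", msbuild_item(name, rule))
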
diff --git a/build/generator/templates/vcnet_vcxproj_filters.ezt b/build/generator/templates/vcnet_vcxproj_filters.ezt
new file mode 100644
index 0000000..6e26426
--- /dev/null
+++ b/build/generator/templates/vcnet_vcxproj_filters.ezt
@@ -0,0 +1,47 @@
+[define COPYRIGHT]
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+[end]<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup>
+ <Filter Include="Header Files">
+ <UniqueIdentifier>{b21a2522-2044-4342-af26-ca29509f90e7}</UniqueIdentifier>
+ <Extensions>h;hpp;hxx;hm;inl</Extensions>
+ </Filter>
+ <Filter Include="Source Files">
+ <UniqueIdentifier>{fd3e9c7d-dd0a-4af5-94aa-d9744db18efe}</UniqueIdentifier>
+ <Extensions>cpp;c;cxx;rc;def;r;odl;idl;hpj;bat</Extensions>
+ </Filter>
+ </ItemGroup>
+[if-any target.desc] <ItemGroup>
+ <ResourceCompile Include="..\svn.rc">
+ <Filter>Source Files</Filter>
+ </ResourceCompile>
+ </ItemGroup>
+[end] <ItemGroup>
+[for sources][is sources.extension ".h"][else] <ClCompile Include="[sources.path]">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+[end][end] </ItemGroup>
+ <ItemGroup>
+[for sources][is sources.extension ".h"] <ClInclude Include="[sources.path]">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+[end][end] </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/build/generator/templates/zlib.dsp.ezt b/build/generator/templates/zlib.dsp.ezt
new file mode 100644
index 0000000..eb679f0
--- /dev/null
+++ b/build/generator/templates/zlib.dsp.ezt
@@ -0,0 +1,99 @@
+# Microsoft Developer Studio Project File - Name="zlib" - Package Owner=<4>
+# Microsoft Developer Studio Generated Build File, Format Version 6.00
+# ** DO NOT EDIT **
+
+# TARGTYPE "Win32 (x86) External Target" 0x0106
+
+CFG=zlib - Win32 Debug
+!MESSAGE This is not a valid makefile. To build this project using NMAKE,
+!MESSAGE use the Export Makefile command and run
+!MESSAGE
+!MESSAGE NMAKE /f "zlib.mak".
+!MESSAGE
+!MESSAGE You can specify a configuration when running NMAKE
+!MESSAGE by defining the macro CFG on the command line. For example:
+!MESSAGE
+!MESSAGE NMAKE /f "zlib.mak" CFG="zlib - Win32 Debug"
+!MESSAGE
+!MESSAGE Possible choices for configuration are:
+!MESSAGE
+!MESSAGE "zlib - Win32 Release" (based on "Win32 (x86) External Target")
+!MESSAGE "zlib - Win32 Debug" (based on "Win32 (x86) External Target")
+!MESSAGE
+
+# Begin Project
+# PROP AllowPerConfigDependencies 0
+# PROP Scc_ProjName ""
+# PROP Scc_LocalPath ""
+
+!IF "$(CFG)" == "zlib - Win32 Release"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 0
+# PROP BASE Output_Dir "[zlib_path]\Release"
+# PROP BASE Intermediate_Dir "[zlib_path]\Release"
+# PROP BASE Cmd_Line "build_zlib.bat release Win32"
+# PROP BASE Rebuild_Opt "rebuild"
+# PROP BASE Target_File "[zlib_path]\zlibstat.lib"
+# PROP BASE Bsc_Name ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 0
+# PROP Output_Dir "[zlib_path]\Release"
+# PROP Intermediate_Dir "[zlib_path]\Release"
+# PROP Cmd_Line "cmd /c build_zlib.bat release Win32"
+# PROP Rebuild_Opt "rebuild"
+# PROP Target_File "[zlib_path]\zlibstat.lib"
+# PROP Bsc_Name ""
+
+!ELSEIF "$(CFG)" == "zlib - Win32 Debug"
+
+# PROP BASE Use_MFC 0
+# PROP BASE Use_Debug_Libraries 1
+# PROP BASE Output_Dir "[zlib_path]\Debug"
+# PROP BASE Intermediate_Dir "[zlib_path]\Debug"
+# PROP BASE Cmd_Line "build_zlib.bat debug Win32"
+# PROP BASE Rebuild_Opt "rebuild"
+# PROP BASE Target_File "[zlib_path]\zlibstatD.lib"
+# PROP BASE Bsc_Name ""
+# PROP Use_MFC 0
+# PROP Use_Debug_Libraries 1
+# PROP Output_Dir "[zlib_path]\Debug"
+# PROP Intermediate_Dir "[zlib_path]\Debug"
+# PROP Cmd_Line "cmd /c build_zlib.bat debug Win32"
+# PROP Rebuild_Opt "rebuild"
+# PROP Target_File "[zlib_path]\zlibstatD.lib"
+# PROP Bsc_Name ""
+
+!ENDIF
+
+# Begin Target
+
+# Name "zlib - Win32 Release"
+# Name "zlib - Win32 Debug"
+
+!IF "$(CFG)" == "zlib - Win32 Release"
+
+!ELSEIF "$(CFG)" == "zlib - Win32 Debug"
+
+!ENDIF
+
+# Begin Group "Source Files"
+
+# PROP Default_Filter "cpp;c;cxx;rc;def;r;odl;idl;hpj;bat"
+[for zlib_sources]# Begin Source File
+
+SOURCE="[zlib_sources]"
+# End Source File
+[end]
+# End Group
+# Begin Group "Header Files"
+
+# PROP Default_Filter "h;hpp;hxx;hm;inl"
+[for zlib_headers]# Begin Source File
+
+SOURCE="[zlib_headers]"
+# End Source File
+[end]
+# End Group
+# End Target
+# End Project
diff --git a/build/generator/templates/zlib.vcproj.ezt b/build/generator/templates/zlib.vcproj.ezt
new file mode 100644
index 0000000..537d41c
--- /dev/null
+++ b/build/generator/templates/zlib.vcproj.ezt
@@ -0,0 +1,85 @@
+[define COPYRIGHT]
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+[end]<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="[version]"
+ Name="zlib"
+ ProjectGUID="[project_guid]"
+ Keyword="MakeFileProj">
+ <Platforms>
+[for platforms] <Platform
+ Name="[platforms]"/>
+[end] </Platforms>
+ <Configurations>
+[for platforms] <Configuration
+ Name="Debug|[platforms]"
+ OutputDirectory="[zlib_path]\Debug"
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="[zlib_path]\Debug"
+ ConfigurationType="0"
+ ManagedExtensions="1"
+ UseOfMFC="0"
+ ATLMinimizesCRunTimeLibraryUsage="FALSE">
+ <Tool
+ Name="VCNMakeTool"
+ BuildCommandLine="cmd /c build_zlib.bat debug [platforms]"
+ ReBuildCommandLine="cmd /c build_zlib.bat debug [platforms] rebuild"
+ CleanCommandLine="cmd /c build_zlib.bat debug [platforms] clean"
+ Output="[zlib_path]\zlibstatD.lib"/>
+ </Configuration>
+ <Configuration
+ Name="Release|[platforms]"
+ OutputDirectory="[zlib_path]\Release"
+ BuildLogFile="$(IntDir)\BuildLog_$(ProjectName).htm"
+ IntermediateDirectory="[zlib_path]\Release"
+ ConfigurationType="0"
+ ManagedExtensions="1"
+ UseOfMFC="0"
+ ATLMinimizesCRunTimeLibraryUsage="FALSE">
+ <Tool
+ Name="VCNMakeTool"
+ BuildCommandLine="cmd /c build_zlib.bat release [platforms]"
+ ReBuildCommandLine="cmd /c build_zlib.bat release [platforms] rebuild"
+ CleanCommandLine="cmd /c build_zlib.bat release [platforms] clean"
+ Output="[zlib_path]\zlibstat.lib"/>
+ </Configuration>
+[end] </Configurations>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cxx;rc;def;r;odl;idl;hpj;bat">
+ [for zlib_sources]<File
+ RelativePath="[zlib_sources]">
+ </File>
+[end]
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl">
+ [for zlib_headers]<File
+ RelativePath="[zlib_headers]">
+ </File>
+[end]
+ </Filter>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/build/generator/templates/zlib.vcxproj.ezt b/build/generator/templates/zlib.vcxproj.ezt
new file mode 100644
index 0000000..02eb86c
--- /dev/null
+++ b/build/generator/templates/zlib.vcxproj.ezt
@@ -0,0 +1,63 @@
+[#
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+]<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+[for configs][for platforms] <ProjectConfiguration Include="[configs]|[platforms]">
+ <Configuration>[configs]</Configuration>
+ <Platform>[platforms]</Platform>
+ </ProjectConfiguration>
+[end][end] </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>[project_guid]</ProjectGuid>
+ <Keyword>MakeFileProj</Keyword>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+[for platforms][for configs] <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="Configuration">
+ <ConfigurationType>Makefile</ConfigurationType>
+ <UseDebugLibraries>[is configs "Debug"]true[else]false[end]</UseDebugLibraries>
+ </PropertyGroup>
+[end][end] <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+[for platforms][for configs] <ImportGroup Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" />
+ </ImportGroup>
+[end][end] <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+[for configs][for platforms] <OutDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">.\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">.\</IntDir>
+ <NMakeBuildCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">cmd /c "$(ProjectDir)build_zlib.bat" [configs] [platforms]</NMakeBuildCommandLine>
+ <NMakeReBuildCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">cmd /c "$(ProjectDir)build_zlib.bat" [configs] [platforms] rebuild</NMakeReBuildCommandLine>
+ <NMakeCleanCommandLine Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">cmd /c "$(ProjectDir)build_zlib.bat" [configs] [platforms] clean</NMakeCleanCommandLine>
+ <NMakeOutput Condition="'$(Configuration)|$(Platform)'=='[configs]|[platforms]'">[zlib_path]\[is configs "Release"]zlibstat.lib[else]zlibstatD.lib[end]</NMakeOutput>
+[end][end] </PropertyGroup>
+ <ItemDefinitionGroup>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+[for zlib_sources] <ClCompile Include="[zlib_sources]" />
+[end] </ItemGroup>
+ <ItemGroup>
+[for zlib_headers] <ClInclude Include="[zlib_headers]" />
+[end] </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project>
diff --git a/build/generator/util/__init__.py b/build/generator/util/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/generator/util/__init__.py
diff --git a/build/generator/util/executable.py b/build/generator/util/executable.py
new file mode 100644
index 0000000..f04f6d2
--- /dev/null
+++ b/build/generator/util/executable.py
@@ -0,0 +1,67 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# executable.py -- Utilities for dealing with external executables
+#
+
+import os
+import subprocess
+
+def exists(file):
+ """Is this an executable file?"""
+ return os.path.isfile(file) and os.access(file, os.X_OK)
+
+def find(file, dirs=None):
+ """Search for an executable in a given list of directories.
+ If no directories are given, search according to the PATH
+ environment variable."""
+ if not dirs:
+ dirs = os.environ["PATH"].split(os.pathsep)
+ for path in dirs:
+    if exists(os.path.join(path, file)):
+      return os.path.join(path, file)
+    elif exists(os.path.join(path, "%s.exe" % file)):
+      return os.path.join(path, "%s.exe" % file)
+ return None
+
+def output(cmd, strip=None):
+ """Run a command and collect all output"""
+ # Check that cmd is in PATH (otherwise we'd get a generic OSError later)
+ import distutils.spawn
+ if isinstance(cmd, str):
+ cmdname = cmd
+ elif isinstance(cmd, list):
+ cmdname = cmd[0]
+ if distutils.spawn.find_executable(cmdname) is None:
+ return None
+
+ # Run it
+ (output, empty_stderr) = subprocess.Popen(cmd, stdout=subprocess.PIPE, \
+ stderr=subprocess.STDOUT).communicate()
+ if strip:
+ return output.strip()
+ else:
+ return output
+
+def run(cmd):
+ """Run a command"""
+ exit_code = os.system(cmd)
+ assert(not exit_code)
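A minimal usage sketch of the helpers above (the import path and the
"svnversion" tool name are illustrative assumptions, not something this
module mandates):

    from generator.util import executable

    # Locate a tool on PATH (or in an explicit list of directories).
    svnversion = executable.find("svnversion")
    if svnversion and executable.exists(svnversion):
        # Capture the command's combined stdout/stderr, stripped of the
        # trailing newline.
        print(executable.output([svnversion, "."], strip=True))
        # Run a command and assert that it exited successfully.
        executable.run("%s --version > /dev/null" % svnversion)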
diff --git a/build/get-py-info.py b/build/get-py-info.py
new file mode 100644
index 0000000..29a6c0a
--- /dev/null
+++ b/build/get-py-info.py
@@ -0,0 +1,158 @@
+#
+# get-py-info.py: get various Python info (for building)
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+# This should be loaded/run by the appropriate Python, rather than executed
+# directly as a program. In other words, you should:
+#
+# $ python2 get-py-info.py --includes
+#
+
+import sys
+import os
+
+def usage():
+ print('USAGE: python %s WHAT' % sys.argv[0])
+ print(' Returns information about how to build Python extensions.')
+ print(' WHAT may be one of:')
+ print(" --includes : return -I include flags")
+ print(" --compile : return a compile command")
+ print(" --link : return a link command")
+ print(" --libs : return just the library options for linking")
+ print(" --site : return the path to site-packages")
+ sys.exit(1)
+
+if len(sys.argv) != 2:
+ usage()
+
+try:
+ from distutils import sysconfig
+except ImportError:
+ # No information available
+ print("none")
+ sys.exit(1)
+
+if sys.argv[1] == '--includes':
+ inc = sysconfig.get_python_inc()
+ plat = sysconfig.get_python_inc(plat_specific=1)
+ if inc == plat:
+ print("-I" + inc)
+ else:
+ print("-I%s -I%s" % (inc, plat))
+ sys.exit(0)
+
+if sys.argv[1] == '--compile':
+ cc, ccshared = sysconfig.get_config_vars('CC', 'CCSHARED')
+ print("%s %s" % (cc, ccshared))
+ sys.exit(0)
+
+def add_option(options, name, value=None):
+ """Add option to list of options"""
+ options.append(name)
+ if value is not None:
+ options.append(value)
+
+def add_option_if_missing(options, name, value=None):
+ """Add option to list of options, if it is not already present"""
+ if options.count(name) == 0 and options.count("-Wl,%s" % name) == 0:
+ add_option(options, name, value)
+
+def link_options():
+ """Get list of Python linker options"""
+
+ # Initialize config variables
+ assert os.name == "posix"
+ options = sysconfig.get_config_var('LDSHARED').split()
+ fwdir = sysconfig.get_config_var('PYTHONFRAMEWORKDIR')
+
+ if fwdir and fwdir != "no-framework":
+
+ # Setup the framework prefix
+ fwprefix = sysconfig.get_config_var('PYTHONFRAMEWORKPREFIX')
+ if fwprefix != "/System/Library/Frameworks":
+ add_option_if_missing(options, "-F%s" % fwprefix)
+
+ # Load in the framework
+ fw = sysconfig.get_config_var('PYTHONFRAMEWORK')
+ add_option(options, "-framework", fw)
+
+ elif sys.platform == 'darwin':
+
+ # Load bundles from python
+ python_exe = os.path.join(sysconfig.get_config_var("BINDIR"),
+ sysconfig.get_config_var('PYTHON'))
+ add_option_if_missing(options, "-bundle_loader", python_exe)
+
+ elif sys.platform == 'cygwin' or sys.platform.startswith('openbsd'):
+
+ # Add flags to build against the Python library (also necessary
+ # for Darwin, but handled elsewhere).
+
+ # Find the path to the library, and add a flag to include it as a
+ # library search path.
+ shared_libdir = sysconfig.get_config_var('LIBDIR')
+ static_libdir = sysconfig.get_config_var('LIBPL')
+ ldlibrary = sysconfig.get_config_var('LDLIBRARY')
+ if os.path.exists(os.path.join(shared_libdir, ldlibrary)):
+ if shared_libdir != '/usr/lib':
+ add_option_if_missing(options, '-L%s' % shared_libdir)
+ elif os.path.exists(os.path.join(static_libdir, ldlibrary)):
+ add_option_if_missing(options, "-L%s" % static_libdir)
+
+ # Add a flag to build against the library itself.
+ python_version = sysconfig.get_config_var('VERSION')
+ add_option_if_missing(options, "-lpython%s" % python_version)
+
+ return options
+
+def lib_options():
+ """Get list of Python library options"""
+ link_command = link_options()
+ options = []
+
+ # Extract library-related options from link command
+ for i in range(len(link_command)):
+ option = link_command[i]
+ if (not option.startswith("-L:") and option.startswith("-L") or
+ option.startswith("-Wl,") or option.startswith("-l") or
+ option.startswith("-F") or option == "-bundle" or
+ option == "-flat_namespace"):
+ options.append(option)
+ elif (option == "-undefined" or option == "-bundle_loader" or
+ option == "-framework"):
+ options.append(option)
+ options.append(link_command[i+1])
+
+ return options
+
+if sys.argv[1] == '--link':
+ print(" ".join(link_options()))
+ sys.exit(0)
+
+if sys.argv[1] == '--libs':
+ print(" ".join(lib_options()))
+ sys.exit(0)
+
+if sys.argv[1] == '--site':
+ print(sysconfig.get_python_lib())
+ sys.exit(0)
+
+usage()
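A small sketch of how a build step might consume this script (the helper
below is hypothetical; the script itself only defines the --includes,
--compile, --link, --libs and --site options listed in usage()):

    import subprocess
    import sys

    def py_info(what, script="build/get-py-info.py"):
        """Run get-py-info.py under the current interpreter, return its output."""
        out = subprocess.check_output([sys.executable, script, what])
        return out.decode().strip()

    # -I flags needed to compile Python extension modules
    print(py_info("--includes"))
    # path to site-packages, e.g. for installing the bindings
    print(py_info("--site"))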
diff --git a/build/getversion.py b/build/getversion.py
new file mode 100755
index 0000000..7ed1e53
--- /dev/null
+++ b/build/getversion.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# getversion.py - Parse version numbers from C header files.
+#
+
+import os
+import re
+import sys
+
+__all__ = ['Parser', 'Result']
+
+class Result:
+ pass
+
+class Parser:
+ def __init__(self):
+ self.patterns = {}
+
+ def search(self, define_name, value_name):
+    'Add the name of a define to the list of search patterns.'
+ self.patterns[define_name] = value_name
+
+ def parse(self, file):
+ 'Parse the file, extracting defines into a Result object.'
+ stream = open(file, 'rt')
+ result = Result()
+ regex = re.compile(r'^\s*#\s*define\s+(\w+)\s+(\d+)')
+ for line in stream.readlines():
+ match = regex.match(line)
+ if match:
+ try:
+ name = self.patterns[match.group(1)]
+ except:
+ continue
+ setattr(result, name, int(match.group(2)))
+ stream.close()
+ return result
+
+def svn_extractor(parser, include_file):
+  '''Pull values from svn_version.h'''
+  parser.search('SVN_VER_MAJOR', 'major')
+  parser.search('SVN_VER_MINOR', 'minor')
+  parser.search('SVN_VER_PATCH', 'patch')
+
+  try:
+    r = parser.parse(include_file)
+  except IOError, e:
+    usage_and_exit(str(e))
+  sys.stdout.write("%d.%d.%d" % (r.major, r.minor, r.patch))
+
+
+def sqlite_extractor(parser, include_file):
+ '''Pull values from sqlite3.h'''
+  parser.search('SQLITE_VERSION_NUMBER', 'version')
+
+  try:
+    r = parser.parse(include_file)
+ except IOError, e:
+ usage_and_exit(str(e))
+ major = r.version / 1000000
+ minor = (r.version - (major * 1000000)) / 1000
+ micro = (r.version - (major * 1000000) - (minor * 1000))
+ sys.stdout.write("%d.%d.%d" % (major, minor, micro))
+
+
+extractors = {
+ 'SVN' : svn_extractor,
+ 'SQLITE' : sqlite_extractor,
+ }
+
+def usage_and_exit(msg):
+ if msg:
+ sys.stderr.write("%s\n\n" % msg)
+ sys.stderr.write("usage: %s [SVN|SQLITE] [header_file]\n" % \
+ os.path.basename(sys.argv[0]))
+ sys.stderr.flush()
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ if len(sys.argv) == 3:
+ extractor = extractors[sys.argv[1]]
+ include_file = sys.argv[2]
+ else:
+ usage_and_exit("Incorrect number of arguments")
+
+ # Extract and print the version number
+ p = Parser()
+ extractor(p, include_file)
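A minimal sketch of driving the Parser class directly, assuming build/ is on
sys.path and a Python 2 interpreter (the module uses Python 2 syntax); the
header path shown is the usual location of Subversion's version defines:

    from getversion import Parser

    p = Parser()
    p.search('SVN_VER_MAJOR', 'major')
    p.search('SVN_VER_MINOR', 'minor')
    p.search('SVN_VER_PATCH', 'patch')
    r = p.parse('subversion/include/svn_version.h')
    print("%d.%d.%d" % (r.major, r.minor, r.patch))

The same result is printed by running the script itself, e.g.
"python build/getversion.py SVN subversion/include/svn_version.h".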
diff --git a/build/hudson/README b/build/hudson/README
new file mode 100644
index 0000000..e28688a
--- /dev/null
+++ b/build/hudson/README
@@ -0,0 +1,20 @@
+INTRODUCTION
+------------
+This directory holds the configuration specific to the Hudson build system.
+The Hudson jobs are configured to build the Subversion code base
+periodically, generate the test report, and report any failures to a
+pre-defined mailing list.
+
+The test results can be viewed at this page:
+ http://hudson.zones.apache.org/hudson/view/subversion
+
+SETUP DETAILS
+-------------
+ - The last 40 build reports are retained.
+ - The Subversion repository is polled every hour (staggered).
+ - Failure notifications are sent to the
+   notifications@subversion.apache.org mailing list.
+ - The Linux builds are performed on an Ubuntu system and are tied to
+   the minerva.apache.org host.
+ - The Solaris builds are performed on a Solaris 10 system and are tied to
+   the hudson-solaris.zones.apache.org host.
diff --git a/build/hudson/jobs/subversion-1.6.x-solaris/config.xml b/build/hudson/jobs/subversion-1.6.x-solaris/config.xml
new file mode 100644
index 0000000..2fbd8b0
--- /dev/null
+++ b/build/hudson/jobs/subversion-1.6.x-solaris/config.xml
@@ -0,0 +1,114 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<project>
+ <actions />
+  <description>Build Subversion from the 1.6.x branch on a Solaris 10 system</description>
+ <logRotator>
+ <daysToKeep>-1</daysToKeep>
+ <numToKeep>40</numToKeep>
+ <artifactDaysToKeep>-1</artifactDaysToKeep>
+ <artifactNumToKeep>-1</artifactNumToKeep>
+ </logRotator>
+ <keepDependencies>false</keepDependencies>
+ <properties>
+ <hudson.plugins.descriptionsetter.JobByDescription />
+ <hudson.plugins.disk__usage.DiskUsageProperty />
+ <hudson.plugins.jira.JiraProjectProperty>
+ <siteName>https://issues.apache.org/jira/</siteName>
+ </hudson.plugins.jira.JiraProjectProperty>
+ </properties>
+ <scm class="hudson.scm.SubversionSCM">
+ <locations>
+ <hudson.scm.SubversionSCM_-ModuleLocation>
+ <remote>https://svn.apache.org/repos/asf/subversion/branches/1.6.x</remote>
+ <local>subversion-1.6.x</local>
+ </hudson.scm.SubversionSCM_-ModuleLocation>
+ </locations>
+ <useUpdate>false</useUpdate>
+ <doRevert>false</doRevert>
+ <browser class="hudson.scm.browsers.ViewSVN">
+ <url>http://svn.apache.org/viewcvs.cgi/?root=Apache-SVN</url>
+ </browser>
+ <excludedRegions />
+ <includedRegions />
+ <excludedUsers />
+ <excludedRevprop />
+ <excludedCommitMessages />
+ </scm>
+ <assignedNode>hudson-solaris (Solaris)</assignedNode>
+ <canRoam>false</canRoam>
+ <disabled>false</disabled>
+ <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
+ <jdk>(Default)</jdk>
+ <triggers class="vector">
+ <hudson.triggers.SCMTrigger>
+ <spec>0 * * * *</spec>
+ </hudson.triggers.SCMTrigger>
+ </triggers>
+ <concurrentBuild>false</concurrentBuild>
+ <builders>
+ <hudson.tasks.Shell>
+ <command>
+ cd subversion-1.6.x
+ PATH="/export/home/hudson/bin:/export/home/hudson/tools/java/latest/bin:/export/home/hudson/tools/ant/latest/bin:/export/home/hudson/tools/tomcat/latest/bin:/opt/subversion-current/bin:/usr/ucb:/usr/local/bin:/usr/bin:/usr/sfw/bin:/usr/sfw/sbin:/opt/sfw/bin:/opt/sfw/sbin:/opt/SUNWspro/bin:/usr/X/bin:/usr/ucb:/usr/sbin:/usr/ccs/bin:$PATH";
+ export PATH
+ ./autogen.sh
+ ./configure
+ --enable-maintainer-mode
+ --disable-shared
+ --with-custom-libtool=/opt/sfw/bin/libtool
+ --with-apr=/opt/subversion-current/apr
+ --with-apr-util=/opt/subversion-current/apr-util-1.3.9
+ --with-sqlite=/opt/subversion-current/sqlite-3.6.23.1/sqlite3.c
+
+ /usr/ccs/bin/make
+ </command>
+ </hudson.tasks.Shell>
+ <hudson.tasks.Shell>
+ <command>
+ #!/bin/bash
+ cd subversion-1.6.x
+ PATH="/export/home/hudson/bin:/export/home/hudson/tools/java/latest/bin:/export/home/hudson/tools/ant/latest/bin:/export/home/hudson/tools/tomcat/latest/bin:/opt/subversion-current/bin:/usr/ucb:/usr/local/bin:/usr/bin:/usr/sfw/bin:/usr/sfw/sbin:/opt/sfw/bin:/opt/sfw/sbin:/opt/SUNWspro/bin:/usr/X/bin:/usr/ucb:/usr/sbin:/usr/ccs/bin";
+ export PATH
+ /usr/ccs/bin/make check
+ ret=$?
+ echo "Generating junit files ..."
+ python ../../subversion-trunk-solaris/subversion-trunk/tools/dev/gen_junit_report.py
+ --log-file=./tests.log
+ --output-dir=build/junit-reports
+ exit
+ $ret
+ </command>
+ </hudson.tasks.Shell>
+ </builders>
+ <publishers>
+ <hudson.tasks.junit.JUnitResultArchiver>
+ <testResults>subversion-1.6.x/build/junit-reports/*</testResults>
+ <testDataPublishers />
+ </hudson.tasks.junit.JUnitResultArchiver>
+ <hudson.tasks.Mailer>
+ <recipients>notifications@subversion.apache.org</recipients>
+ <dontNotifyEveryUnstableBuild>false</dontNotifyEveryUnstableBuild>
+ <sendToIndividuals>false</sendToIndividuals>
+ </hudson.tasks.Mailer>
+ </publishers>
+ <buildWrappers />
+</project>
+
diff --git a/build/hudson/jobs/subversion-1.6.x-ubuntu/config.xml b/build/hudson/jobs/subversion-1.6.x-ubuntu/config.xml
new file mode 100644
index 0000000..b203d39
--- /dev/null
+++ b/build/hudson/jobs/subversion-1.6.x-ubuntu/config.xml
@@ -0,0 +1,112 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<project>
+ <actions />
+  <description>Build Subversion from the 1.6.x branch on an Ubuntu system</description>
+ <logRotator>
+ <daysToKeep>-1</daysToKeep>
+ <numToKeep>40</numToKeep>
+ <artifactDaysToKeep>-1</artifactDaysToKeep>
+ <artifactNumToKeep>-1</artifactNumToKeep>
+ </logRotator>
+ <keepDependencies>false</keepDependencies>
+ <properties>
+ <hudson.plugins.descriptionsetter.JobByDescription />
+ <hudson.plugins.disk__usage.DiskUsageProperty />
+ <hudson.plugins.jira.JiraProjectProperty>
+ <siteName>https://issues.apache.org/jira/</siteName>
+ </hudson.plugins.jira.JiraProjectProperty>
+ </properties>
+ <scm class="hudson.scm.SubversionSCM">
+ <locations>
+ <hudson.scm.SubversionSCM_-ModuleLocation>
+ <remote>https://svn.apache.org/repos/asf/subversion/branches/1.6.x</remote>
+ <local>subversion-1.6.x</local>
+ </hudson.scm.SubversionSCM_-ModuleLocation>
+ </locations>
+ <useUpdate>false</useUpdate>
+ <doRevert>false</doRevert>
+ <browser class="hudson.scm.browsers.ViewSVN">
+ <url>http://svn.apache.org/viewcvs.cgi/?root=Apache-SVN</url>
+ </browser>
+ <excludedRegions />
+ <includedRegions />
+ <excludedUsers />
+ <excludedRevprop />
+ <excludedCommitMessages />
+ </scm>
+ <assignedNode>minerva.apache.org (Ubuntu)</assignedNode>
+ <canRoam>false</canRoam>
+ <disabled>false</disabled>
+ <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
+ <jdk>(Default)</jdk>
+ <triggers class="vector">
+ <hudson.triggers.SCMTrigger>
+ <spec>0 * * * *</spec>
+ </hudson.triggers.SCMTrigger>
+ </triggers>
+ <concurrentBuild>false</concurrentBuild>
+ <builders>
+ <hudson.tasks.Shell>
+ <command>
+ cd subversion-1.6.x
+ # make distclean
+ ./autogen.sh
+ # cp /home/bhuvan/apr . -r
+ # cp /home/bhuvan/apr-util . -r
+ # cp /home/bhuvan/sqlite-amalgamation . -r
+ ./configure
+ --enable-maintainer-mode
+ --disable-shared
+ --with-apr=/usr/local/apr/apr-1.4.2
+ --with-apr-util=/usr/local/apr/apr-util-1.3.9
+ --with-sqlite=/usr/local/svn-deps/subversion-1.6.11/sqlite-amalgamation/sqlite3.c
+ make
+ </command>
+ </hudson.tasks.Shell>
+ <hudson.tasks.Shell>
+ <command>
+ #!/bin/bash
+ cd subversion-1.6.x
+ make check
+ ret=$?
+ echo "Generating junit files ..."
+ python ../../subversion-trunk-ubuntu/subversion-trunk/tools/dev/gen_junit_report.py
+ --log-file=./tests.log
+ --output-dir=build/junit-reports
+ exit
+ $ret
+ </command>
+ </hudson.tasks.Shell>
+ </builders>
+ <publishers>
+ <hudson.tasks.junit.JUnitResultArchiver>
+ <testResults>subversion-1.6.x/build/junit-reports/*</testResults>
+ <testDataPublishers />
+ </hudson.tasks.junit.JUnitResultArchiver>
+ <hudson.tasks.Mailer>
+ <recipients>notifications@subversion.apache.org</recipients>
+ <dontNotifyEveryUnstableBuild>true</dontNotifyEveryUnstableBuild>
+ <sendToIndividuals>false</sendToIndividuals>
+ </hudson.tasks.Mailer>
+ </publishers>
+ <buildWrappers />
+</project>
+
diff --git a/build/hudson/jobs/subversion-doxygen/config.xml b/build/hudson/jobs/subversion-doxygen/config.xml
new file mode 100644
index 0000000..5a0252a
--- /dev/null
+++ b/build/hudson/jobs/subversion-doxygen/config.xml
@@ -0,0 +1,102 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<project>
+ <actions />
+  <description>Build the Subversion doxygen documentation from the "trunk" branch</description>
+ <logRotator>
+ <daysToKeep>-1</daysToKeep>
+ <numToKeep>40</numToKeep>
+ <artifactDaysToKeep>-1</artifactDaysToKeep>
+ <artifactNumToKeep>-1</artifactNumToKeep>
+ </logRotator>
+ <keepDependencies>false</keepDependencies>
+ <properties>
+ <hudson.plugins.descriptionsetter.JobByDescription />
+ <hudson.plugins.disk__usage.DiskUsageProperty />
+ <hudson.plugins.jira.JiraProjectProperty>
+ <siteName>https://issues.apache.org/jira/</siteName>
+ </hudson.plugins.jira.JiraProjectProperty>
+ </properties>
+ <scm class="hudson.scm.SubversionSCM">
+ <locations>
+ <hudson.scm.SubversionSCM_-ModuleLocation>
+ <remote>https://svn.apache.org/repos/asf/subversion/trunk</remote>
+ <local>subversion-trunk</local>
+ </hudson.scm.SubversionSCM_-ModuleLocation>
+ </locations>
+ <useUpdate>false</useUpdate>
+ <doRevert>false</doRevert>
+ <browser class="hudson.scm.browsers.ViewSVN">
+ <url>http://svn.apache.org/viewcvs.cgi/?root=Apache-SVN</url>
+ </browser>
+ <excludedRegions />
+ <includedRegions />
+ <excludedUsers />
+ <excludedRevprop />
+ <excludedCommitMessages />
+ </scm>
+ <assignedNode>minerva.apache.org (Ubuntu)</assignedNode>
+ <canRoam>false</canRoam>
+ <disabled>false</disabled>
+ <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
+ <jdk>(Default)</jdk>
+ <triggers class="vector">
+ <hudson.triggers.SCMTrigger>
+ <spec>0 23 * * *</spec>
+ </hudson.triggers.SCMTrigger>
+ </triggers>
+ <concurrentBuild>false</concurrentBuild>
+ <builders>
+ <hudson.tasks.Shell>
+ <command>
+ cd subversion-trunk
+ ./autogen.sh
+ ./configure
+ --enable-maintainer-mode
+ --disable-shared
+ --with-apr=/usr/local/apr/apr-1.4.2
+ --with-apr-util=/usr/local/apr/apr-util-1.3.9
+ --with-jdk=/home/hudson/tools/java/jdk1.6.0_17-64/
+ make
+ </command>
+ </hudson.tasks.Shell>
+ <hudson.tasks.Shell>
+ <command>
+ #!/bin/bash
+ cd subversion-trunk
+ export PATH=doxygen-1.6.1/bin:$PATH
+ make doc-api
+ </command>
+ </hudson.tasks.Shell>
+ </builders>
+ <publishers>
+ <hudson.tasks.JavadocArchiver>
+ <javadocDir>subversion-trunk/doc/doxygen/html</javadocDir>
+ <keepAll>false</keepAll>
+ </hudson.tasks.JavadocArchiver>
+ <hudson.tasks.Mailer>
+ <recipients>notifications@subversion.apache.org</recipients>
+ <dontNotifyEveryUnstableBuild>true</dontNotifyEveryUnstableBuild>
+ <sendToIndividuals>false</sendToIndividuals>
+ </hudson.tasks.Mailer>
+ </publishers>
+ <buildWrappers />
+</project>
+
diff --git a/build/hudson/jobs/subversion-javadoc/config.xml b/build/hudson/jobs/subversion-javadoc/config.xml
new file mode 100644
index 0000000..214d6c8
--- /dev/null
+++ b/build/hudson/jobs/subversion-javadoc/config.xml
@@ -0,0 +1,102 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<project>
+ <actions />
+  <description>Build the Subversion javadoc from the "trunk" branch</description>
+ <logRotator>
+ <daysToKeep>-1</daysToKeep>
+ <numToKeep>40</numToKeep>
+ <artifactDaysToKeep>-1</artifactDaysToKeep>
+ <artifactNumToKeep>-1</artifactNumToKeep>
+ </logRotator>
+ <keepDependencies>false</keepDependencies>
+ <properties>
+ <hudson.plugins.descriptionsetter.JobByDescription />
+ <hudson.plugins.disk__usage.DiskUsageProperty />
+ <hudson.plugins.jira.JiraProjectProperty>
+ <siteName>https://issues.apache.org/jira/</siteName>
+ </hudson.plugins.jira.JiraProjectProperty>
+ </properties>
+ <scm class="hudson.scm.SubversionSCM">
+ <locations>
+ <hudson.scm.SubversionSCM_-ModuleLocation>
+ <remote>https://svn.apache.org/repos/asf/subversion/trunk</remote>
+ <local>subversion-trunk</local>
+ </hudson.scm.SubversionSCM_-ModuleLocation>
+ </locations>
+ <useUpdate>false</useUpdate>
+ <doRevert>false</doRevert>
+ <browser class="hudson.scm.browsers.ViewSVN">
+ <url>http://svn.apache.org/viewcvs.cgi/?root=Apache-SVN</url>
+ </browser>
+ <excludedRegions />
+ <includedRegions />
+ <excludedUsers />
+ <excludedRevprop />
+ <excludedCommitMessages />
+ </scm>
+ <assignedNode>minerva.apache.org (Ubuntu)</assignedNode>
+ <canRoam>false</canRoam>
+ <disabled>false</disabled>
+ <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
+ <jdk>(Default)</jdk>
+ <triggers class="vector">
+ <hudson.triggers.SCMTrigger>
+ <spec>0 23 * * *</spec>
+ </hudson.triggers.SCMTrigger>
+ </triggers>
+ <concurrentBuild>false</concurrentBuild>
+ <builders>
+ <hudson.tasks.Shell>
+ <command>
+ cd subversion-trunk
+ ./autogen.sh
+ ./configure
+ --enable-maintainer-mode
+ --disable-shared
+ --with-apr=/usr/local/apr/apr-1.4.2
+ --with-apr-util=/usr/local/apr/apr-util-1.3.9
+ --with-jdk=/home/hudson/tools/java/jdk1.6.0_17-64/
+ make
+ </command>
+ </hudson.tasks.Shell>
+ <hudson.tasks.Shell>
+ <command>
+ #!/bin/bash
+ cd subversion-trunk
+ export PATH=doxygen-1.6.1/bin:$PATH
+ make doc-javahl
+ </command>
+ </hudson.tasks.Shell>
+ </builders>
+ <publishers>
+ <hudson.tasks.JavadocArchiver>
+ <javadocDir>subversion-trunk/doc/javadoc/</javadocDir>
+ <keepAll>false</keepAll>
+ </hudson.tasks.JavadocArchiver>
+ <hudson.tasks.Mailer>
+ <recipients>notifications@subversion.apache.org</recipients>
+ <dontNotifyEveryUnstableBuild>true</dontNotifyEveryUnstableBuild>
+ <sendToIndividuals>false</sendToIndividuals>
+ </hudson.tasks.Mailer>
+ </publishers>
+ <buildWrappers />
+</project>
+
diff --git a/build/hudson/jobs/subversion-trunk-solaris/config.xml b/build/hudson/jobs/subversion-trunk-solaris/config.xml
new file mode 100644
index 0000000..a359262
--- /dev/null
+++ b/build/hudson/jobs/subversion-trunk-solaris/config.xml
@@ -0,0 +1,110 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<project>
+ <actions />
+  <description>Build Subversion from the "trunk" branch on a Solaris 10 system</description>
+ <logRotator>
+ <daysToKeep>-1</daysToKeep>
+ <numToKeep>40</numToKeep>
+ <artifactDaysToKeep>-1</artifactDaysToKeep>
+ <artifactNumToKeep>-1</artifactNumToKeep>
+ </logRotator>
+ <keepDependencies>false</keepDependencies>
+ <properties>
+ <hudson.plugins.descriptionsetter.JobByDescription />
+ <hudson.plugins.disk__usage.DiskUsageProperty />
+ <hudson.plugins.jira.JiraProjectProperty>
+ <siteName>https://issues.apache.org/jira/</siteName>
+ </hudson.plugins.jira.JiraProjectProperty>
+ </properties>
+ <scm class="hudson.scm.SubversionSCM">
+ <locations>
+ <hudson.scm.SubversionSCM_-ModuleLocation>
+ <remote>https://svn.apache.org/repos/asf/subversion/trunk</remote>
+ <local>subversion-trunk</local>
+ </hudson.scm.SubversionSCM_-ModuleLocation>
+ </locations>
+ <useUpdate>false</useUpdate>
+ <doRevert>false</doRevert>
+ <excludedRegions />
+ <includedRegions />
+ <excludedUsers />
+ <excludedRevprop />
+ <excludedCommitMessages />
+ </scm>
+ <assignedNode>hudson-solaris (Solaris)</assignedNode>
+ <canRoam>false</canRoam>
+ <disabled>false</disabled>
+ <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
+ <jdk>(Default)</jdk>
+ <triggers class="vector">
+ <hudson.triggers.SCMTrigger>
+ <spec>30 * * * *</spec>
+ </hudson.triggers.SCMTrigger>
+ </triggers>
+ <concurrentBuild>false</concurrentBuild>
+ <builders>
+ <hudson.tasks.Shell>
+ <command>
+ cd subversion-trunk
+ PATH="/export/home/hudson/bin:/export/home/hudson/tools/java/latest/bin:/export/home/hudson/tools/ant/latest/bin:/export/home/hudson/tools/tomcat/latest/bin:/opt/subversion-current/bin:/opt/subversion-current/sqlite-3.6.23.1:/usr/ucb:/usr/local/bin:/usr/bin:/usr/sfw/bin:/usr/sfw/sbin:/opt/sfw/bin:/opt/sfw/sbin:/opt/SUNWspro/bin:/usr/X/bin:/usr/ucb:/usr/sbin:/usr/ccs/bin:$PATH";
+ export PATH
+ ./autogen.sh
+ ./configure
+ --enable-maintainer-mode
+ --disable-shared
+ --with-custom-libtool=/opt/sfw/bin/libtool
+ --with-apr=/opt/subversion-current/apr
+ --with-apr-util=/opt/subversion-current/apr-util-1.3.9
+ --with-sqlite=/opt/subversion-current/sqlite-3.6.23.1/sqlite3.c
+ make
+ </command>
+ </hudson.tasks.Shell>
+ <hudson.tasks.Shell>
+ <command>
+ #!/bin/bash
+ cd subversion-trunk
+ PATH="/export/home/hudson/bin:/export/home/hudson/tools/java/latest/bin:/export/home/hudson/tools/ant/latest/bin:/export/home/hudson/tools/tomcat/latest/bin:/opt/subversion-current/bin:/usr/ucb:/usr/local/bin:/usr/bin:/usr/sfw/bin:/usr/sfw/sbin:/opt/sfw/bin:/opt/sfw/sbin:/opt/SUNWspro/bin:/usr/X/bin:/usr/ucb:/usr/sbin:/usr/ccs/bin";
+ export PATH
+ make check
+ ret=$?
+ echo "Generating junit files ..."
+ python tools/dev/gen_junit_report.py
+ --log-file=./tests.log
+ --output-dir=build/junit-reports
+ exit
+ $ret
+ </command>
+ </hudson.tasks.Shell>
+ </builders>
+ <publishers>
+ <hudson.tasks.junit.JUnitResultArchiver>
+ <testResults>subversion-trunk/build/junit-reports/*</testResults>
+ <testDataPublishers />
+ </hudson.tasks.junit.JUnitResultArchiver>
+ <hudson.tasks.Mailer>
+ <recipients>notifications@subversion.apache.org</recipients>
+ <dontNotifyEveryUnstableBuild>true</dontNotifyEveryUnstableBuild>
+ <sendToIndividuals>false</sendToIndividuals>
+ </hudson.tasks.Mailer>
+ </publishers>
+ <buildWrappers />
+</project>
+
diff --git a/build/hudson/jobs/subversion-trunk-ubuntu/config.xml b/build/hudson/jobs/subversion-trunk-ubuntu/config.xml
new file mode 100644
index 0000000..64df6c8
--- /dev/null
+++ b/build/hudson/jobs/subversion-trunk-ubuntu/config.xml
@@ -0,0 +1,106 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<project>
+ <actions />
+  <description>Build Subversion from the "trunk" branch on an Ubuntu system</description>
+ <logRotator>
+ <daysToKeep>-1</daysToKeep>
+ <numToKeep>40</numToKeep>
+ <artifactDaysToKeep>-1</artifactDaysToKeep>
+ <artifactNumToKeep>-1</artifactNumToKeep>
+ </logRotator>
+ <keepDependencies>false</keepDependencies>
+ <properties>
+ <hudson.plugins.descriptionsetter.JobByDescription />
+ <hudson.plugins.disk__usage.DiskUsageProperty />
+ <hudson.plugins.jira.JiraProjectProperty>
+ <siteName>https://issues.apache.org/jira/</siteName>
+ </hudson.plugins.jira.JiraProjectProperty>
+ </properties>
+ <scm class="hudson.scm.SubversionSCM">
+ <locations>
+ <hudson.scm.SubversionSCM_-ModuleLocation>
+ <remote>https://svn.apache.org/repos/asf/subversion/trunk</remote>
+ <local>subversion-trunk</local>
+ </hudson.scm.SubversionSCM_-ModuleLocation>
+ </locations>
+ <useUpdate>false</useUpdate>
+ <doRevert>false</doRevert>
+ <browser class="hudson.scm.browsers.ViewSVN">
+ <url>http://svn.apache.org/viewcvs.cgi/?root=Apache-SVN</url>
+ </browser>
+ <excludedRegions />
+ <includedRegions />
+ <excludedUsers />
+ <excludedRevprop />
+ <excludedCommitMessages />
+ </scm>
+ <assignedNode>minerva.apache.org (Ubuntu)</assignedNode>
+ <canRoam>false</canRoam>
+ <disabled>false</disabled>
+ <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
+ <jdk>(Default)</jdk>
+ <triggers class="vector">
+ <hudson.triggers.SCMTrigger>
+ <spec>15 * * * *</spec>
+ </hudson.triggers.SCMTrigger>
+ </triggers>
+ <concurrentBuild>false</concurrentBuild>
+ <builders>
+ <hudson.tasks.Shell>
+ <command>
+ cd subversion-trunk
+ #make distclean
+ ./autogen.sh
+ ./configure
+ --enable-maintainer-mode
+ --disable-shared
+ --with-apr=/usr/local/apr/apr-1.4.2
+ --with-apr-util=/usr/local/apr/apr-util-1.3.9
+ --with-sqlite=/usr/local/svn-deps/subversion-1.6.11/sqlite-amalgamation/sqlite3.c
+ make
+ </command>
+ </hudson.tasks.Shell>
+ <hudson.tasks.Shell>
+ <command>
+ #!/bin/bash
+ cd subversion-trunk
+ make check
+ ret=$?
+ echo "Generating junit files ..."
+ python tools/dev/gen_junit_report.py
+ --log-file=./tests.log
+ --output-dir=build/junit-reports
+ exit $ret
+ </command>
+ </hudson.tasks.Shell>
+ </builders>
+ <publishers>
+ <hudson.tasks.junit.JUnitResultArchiver>
+ <testResults>subversion-trunk/build/junit-reports/*</testResults>
+ <testDataPublishers />
+ </hudson.tasks.junit.JUnitResultArchiver>
+ <hudson.tasks.Mailer>
+ <recipients>notifications@subversion.apache.org</recipients>
+ <dontNotifyEveryUnstableBuild>true</dontNotifyEveryUnstableBuild>
+ <sendToIndividuals>false</sendToIndividuals>
+ </hudson.tasks.Mailer>
+ </publishers>
+ <buildWrappers />
+</project>
diff --git a/build/install-sh b/build/install-sh
new file mode 100755
index 0000000..6781b98
--- /dev/null
+++ b/build/install-sh
@@ -0,0 +1,520 @@
+#!/bin/sh
+# install - install a program, script, or datafile
+
+scriptversion=2009-04-28.21; # UTC
+
+# This originates from X11R5 (mit/util/scripts/install.sh), which was
+# later released in X11R6 (xc/config/util/install.sh) with the
+# following copyright and license.
+#
+# Copyright (C) 1994 X Consortium
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
+# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+# Except as contained in this notice, the name of the X Consortium shall not
+# be used in advertising or otherwise to promote the sale, use or other deal-
+# ings in this Software without prior written authorization from the X Consor-
+# tium.
+#
+#
+# FSF changes to this file are in the public domain.
+#
+# Calling this script install-sh is preferred over install.sh, to prevent
+# `make' implicit rules from creating a file called install from it
+# when there is no Makefile.
+#
+# This script is compatible with the BSD install script, but was written
+# from scratch.
+
+nl='
+'
+IFS=" "" $nl"
+
+# set DOITPROG to echo to test this script
+
+# Don't use :- since 4.3BSD and earlier shells don't like it.
+doit=${DOITPROG-}
+if test -z "$doit"; then
+ doit_exec=exec
+else
+ doit_exec=$doit
+fi
+
+# Put in absolute file names if you don't have them in your path;
+# or use environment vars.
+
+chgrpprog=${CHGRPPROG-chgrp}
+chmodprog=${CHMODPROG-chmod}
+chownprog=${CHOWNPROG-chown}
+cmpprog=${CMPPROG-cmp}
+cpprog=${CPPROG-cp}
+mkdirprog=${MKDIRPROG-mkdir}
+mvprog=${MVPROG-mv}
+rmprog=${RMPROG-rm}
+stripprog=${STRIPPROG-strip}
+
+posix_glob='?'
+initialize_posix_glob='
+ test "$posix_glob" != "?" || {
+ if (set -f) 2>/dev/null; then
+ posix_glob=
+ else
+ posix_glob=:
+ fi
+ }
+'
+
+posix_mkdir=
+
+# Desired mode of installed file.
+mode=0755
+
+chgrpcmd=
+chmodcmd=$chmodprog
+chowncmd=
+mvcmd=$mvprog
+rmcmd="$rmprog -f"
+stripcmd=
+
+src=
+dst=
+dir_arg=
+dst_arg=
+
+copy_on_change=false
+no_target_directory=
+
+usage="\
+Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
+ or: $0 [OPTION]... SRCFILES... DIRECTORY
+ or: $0 [OPTION]... -t DIRECTORY SRCFILES...
+ or: $0 [OPTION]... -d DIRECTORIES...
+
+In the 1st form, copy SRCFILE to DSTFILE.
+In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
+In the 4th, create DIRECTORIES.
+
+Options:
+ --help display this help and exit.
+ --version display version info and exit.
+
+ -c (ignored)
+ -C install only if different (preserve the last data modification time)
+ -d create directories instead of installing files.
+ -g GROUP $chgrpprog installed files to GROUP.
+ -m MODE $chmodprog installed files to MODE.
+ -o USER $chownprog installed files to USER.
+ -s $stripprog installed files.
+ -t DIRECTORY install into DIRECTORY.
+ -T report an error if DSTFILE is a directory.
+
+Environment variables override the default commands:
+ CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
+ RMPROG STRIPPROG
+"
+
+while test $# -ne 0; do
+ case $1 in
+ -c) ;;
+
+ -C) copy_on_change=true;;
+
+ -d) dir_arg=true;;
+
+ -g) chgrpcmd="$chgrpprog $2"
+ shift;;
+
+ --help) echo "$usage"; exit $?;;
+
+ -m) mode=$2
+ case $mode in
+ *' '* | *' '* | *'
+'* | *'*'* | *'?'* | *'['*)
+ echo "$0: invalid mode: $mode" >&2
+ exit 1;;
+ esac
+ shift;;
+
+ -o) chowncmd="$chownprog $2"
+ shift;;
+
+ -s) stripcmd=$stripprog;;
+
+ -t) dst_arg=$2
+ shift;;
+
+ -T) no_target_directory=true;;
+
+ --version) echo "$0 $scriptversion"; exit $?;;
+
+ --) shift
+ break;;
+
+ -*) echo "$0: invalid option: $1" >&2
+ exit 1;;
+
+ *) break;;
+ esac
+ shift
+done
+
+if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
+ # When -d is used, all remaining arguments are directories to create.
+ # When -t is used, the destination is already specified.
+ # Otherwise, the last argument is the destination. Remove it from $@.
+ for arg
+ do
+ if test -n "$dst_arg"; then
+ # $@ is not empty: it contains at least $arg.
+ set fnord "$@" "$dst_arg"
+ shift # fnord
+ fi
+ shift # arg
+ dst_arg=$arg
+ done
+fi
+
+if test $# -eq 0; then
+ if test -z "$dir_arg"; then
+ echo "$0: no input file specified." >&2
+ exit 1
+ fi
+ # It's OK to call `install-sh -d' without argument.
+ # This can happen when creating conditional directories.
+ exit 0
+fi
+
+if test -z "$dir_arg"; then
+ trap '(exit $?); exit' 1 2 13 15
+
+ # Set umask so as not to create temps with too-generous modes.
+ # However, 'strip' requires both read and write access to temps.
+ case $mode in
+ # Optimize common cases.
+ *644) cp_umask=133;;
+ *755) cp_umask=22;;
+
+ *[0-7])
+ if test -z "$stripcmd"; then
+ u_plus_rw=
+ else
+ u_plus_rw='% 200'
+ fi
+ cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
+ *)
+ if test -z "$stripcmd"; then
+ u_plus_rw=
+ else
+ u_plus_rw=,u+rw
+ fi
+ cp_umask=$mode$u_plus_rw;;
+ esac
+fi
+
+for src
+do
+ # Protect names starting with `-'.
+ case $src in
+ -*) src=./$src;;
+ esac
+
+ if test -n "$dir_arg"; then
+ dst=$src
+ dstdir=$dst
+ test -d "$dstdir"
+ dstdir_status=$?
+ else
+
+ # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
+ # might cause directories to be created, which would be especially bad
+ # if $src (and thus $dsttmp) contains '*'.
+ if test ! -f "$src" && test ! -d "$src"; then
+ echo "$0: $src does not exist." >&2
+ exit 1
+ fi
+
+ if test -z "$dst_arg"; then
+ echo "$0: no destination specified." >&2
+ exit 1
+ fi
+
+ dst=$dst_arg
+ # Protect names starting with `-'.
+ case $dst in
+ -*) dst=./$dst;;
+ esac
+
+ # If destination is a directory, append the input filename; won't work
+ # if double slashes aren't ignored.
+ if test -d "$dst"; then
+ if test -n "$no_target_directory"; then
+ echo "$0: $dst_arg: Is a directory" >&2
+ exit 1
+ fi
+ dstdir=$dst
+ dst=$dstdir/`basename "$src"`
+ dstdir_status=0
+ else
+ # Prefer dirname, but fall back on a substitute if dirname fails.
+ dstdir=`
+ (dirname "$dst") 2>/dev/null ||
+ expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+ X"$dst" : 'X\(//\)[^/]' \| \
+ X"$dst" : 'X\(//\)$' \| \
+ X"$dst" : 'X\(/\)' \| . 2>/dev/null ||
+ echo X"$dst" |
+ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)[^/].*/{
+ s//\1/
+ q
+ }
+ /^X\(\/\/\)$/{
+ s//\1/
+ q
+ }
+ /^X\(\/\).*/{
+ s//\1/
+ q
+ }
+ s/.*/./; q'
+ `
+
+ test -d "$dstdir"
+ dstdir_status=$?
+ fi
+ fi
+
+ obsolete_mkdir_used=false
+
+ if test $dstdir_status != 0; then
+ case $posix_mkdir in
+ '')
+ # Create intermediate dirs using mode 755 as modified by the umask.
+ # This is like FreeBSD 'install' as of 1997-10-28.
+ umask=`umask`
+ case $stripcmd.$umask in
+ # Optimize common cases.
+ *[2367][2367]) mkdir_umask=$umask;;
+ .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
+
+ *[0-7])
+ mkdir_umask=`expr $umask + 22 \
+ - $umask % 100 % 40 + $umask % 20 \
+ - $umask % 10 % 4 + $umask % 2
+ `;;
+ *) mkdir_umask=$umask,go-w;;
+ esac
+
+ # With -d, create the new directory with the user-specified mode.
+ # Otherwise, rely on $mkdir_umask.
+ if test -n "$dir_arg"; then
+ mkdir_mode=-m$mode
+ else
+ mkdir_mode=
+ fi
+
+ posix_mkdir=false
+ case $umask in
+ *[123567][0-7][0-7])
+ # POSIX mkdir -p sets u+wx bits regardless of umask, which
+ # is incompatible with FreeBSD 'install' when (umask & 300) != 0.
+ ;;
+ *)
+ tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
+ trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0
+
+ if (umask $mkdir_umask &&
+ exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1
+ then
+ if test -z "$dir_arg" || {
+ # Check for POSIX incompatibilities with -m.
+ # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
+ # other-writeable bit of parent directory when it shouldn't.
+ # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
+ ls_ld_tmpdir=`ls -ld "$tmpdir"`
+ case $ls_ld_tmpdir in
+ d????-?r-*) different_mode=700;;
+ d????-?--*) different_mode=755;;
+ *) false;;
+ esac &&
+ $mkdirprog -m$different_mode -p -- "$tmpdir" && {
+ ls_ld_tmpdir_1=`ls -ld "$tmpdir"`
+ test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
+ }
+ }
+ then posix_mkdir=:
+ fi
+ rmdir "$tmpdir/d" "$tmpdir"
+ else
+ # Remove any dirs left behind by ancient mkdir implementations.
+ rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null
+ fi
+ trap '' 0;;
+ esac;;
+ esac
+
+ if
+ $posix_mkdir && (
+ umask $mkdir_umask &&
+ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
+ )
+ then :
+ else
+
+ # The umask is ridiculous, or mkdir does not conform to POSIX,
+ # or it failed possibly due to a race condition. Create the
+ # directory the slow way, step by step, checking for races as we go.
+
+ case $dstdir in
+ /*) prefix='/';;
+ -*) prefix='./';;
+ *) prefix='';;
+ esac
+
+ eval "$initialize_posix_glob"
+
+ oIFS=$IFS
+ IFS=/
+ $posix_glob set -f
+ set fnord $dstdir
+ shift
+ $posix_glob set +f
+ IFS=$oIFS
+
+ prefixes=
+
+ for d
+ do
+ test -z "$d" && continue
+
+ prefix=$prefix$d
+ if test -d "$prefix"; then
+ prefixes=
+ else
+ if $posix_mkdir; then
+ (umask=$mkdir_umask &&
+ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
+ # Don't fail if two instances are running concurrently.
+ test -d "$prefix" || exit 1
+ else
+ case $prefix in
+ *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
+ *) qprefix=$prefix;;
+ esac
+ prefixes="$prefixes '$qprefix'"
+ fi
+ fi
+ prefix=$prefix/
+ done
+
+ if test -n "$prefixes"; then
+ # Don't fail if two instances are running concurrently.
+ (umask $mkdir_umask &&
+ eval "\$doit_exec \$mkdirprog $prefixes") ||
+ test -d "$dstdir" || exit 1
+ obsolete_mkdir_used=true
+ fi
+ fi
+ fi
+
+ if test -n "$dir_arg"; then
+ { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
+ { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
+ { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
+ test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
+ else
+
+ # Make a couple of temp file names in the proper directory.
+ dsttmp=$dstdir/_inst.$$_
+ rmtmp=$dstdir/_rm.$$_
+
+ # Trap to clean up those temp files at exit.
+ trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
+
+ # Copy the file name to the temp name.
+ (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
+
+ # and set any options; do chmod last to preserve setuid bits.
+ #
+ # If any of these fail, we abort the whole thing. If we want to
+ # ignore errors from any of these, just make sure not to ignore
+ # errors from the above "$doit $cpprog $src $dsttmp" command.
+ #
+ { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
+ { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
+ { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
+ { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
+
+ # If -C, don't bother to copy if it wouldn't change the file.
+ if $copy_on_change &&
+ old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` &&
+ new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` &&
+
+ eval "$initialize_posix_glob" &&
+ $posix_glob set -f &&
+ set X $old && old=:$2:$4:$5:$6 &&
+ set X $new && new=:$2:$4:$5:$6 &&
+ $posix_glob set +f &&
+
+ test "$old" = "$new" &&
+ $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
+ then
+ rm -f "$dsttmp"
+ else
+ # Rename the file to the real destination.
+ $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
+
+ # The rename failed, perhaps because mv can't rename something else
+ # to itself, or perhaps because mv is so ancient that it does not
+ # support -f.
+ {
+ # Now remove or move aside any old file at destination location.
+ # We try this two ways since rm can't unlink itself on some
+ # systems and the destination file might be busy for other
+ # reasons. In this case, the final cleanup might fail but the new
+ # file should still install successfully.
+ {
+ test ! -f "$dst" ||
+ $doit $rmcmd -f "$dst" 2>/dev/null ||
+ { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
+ { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
+ } ||
+ { echo "$0: cannot unlink or rename $dst" >&2
+ (exit 1); exit 1
+ }
+ } &&
+
+ # Now rename the file to the real destination.
+ $doit $mvcmd "$dsttmp" "$dst"
+ }
+ fi || exit 1
+
+ trap '' 0
+ fi
+done
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/build/libtool.m4 b/build/libtool.m4
new file mode 100644
index 0000000..d812584
--- /dev/null
+++ b/build/libtool.m4
@@ -0,0 +1,7831 @@
+# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
+#
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+# Inc.
+# Written by Gordon Matzigkeit, 1996
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+m4_define([_LT_COPYING], [dnl
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+# Inc.
+# Written by Gordon Matzigkeit, 1996
+#
+# This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING. If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+])
+
+# serial 57 LT_INIT
+
+
+# LT_PREREQ(VERSION)
+# ------------------
+# Complain and exit if this libtool version is less than VERSION.
+m4_defun([LT_PREREQ],
+[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1,
+ [m4_default([$3],
+ [m4_fatal([Libtool version $1 or higher is required],
+ 63)])],
+ [$2])])
+
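+# Illustrative usage (editor's sketch, not from upstream libtool.m4):
+# a package's configure.ac would typically pair LT_PREREQ with LT_INIT,
+# for example:
+#
+#   LT_PREREQ([2.2])
+#   LT_INIT([dlopen win32-dll])
+#
+# As the definition above shows, the optional second and third arguments
+# are actions to run when the version requirement is and is not met.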
+
+# _LT_CHECK_BUILDDIR
+# ------------------
+# Complain if the absolute build directory name contains unusual characters
+m4_defun([_LT_CHECK_BUILDDIR],
+[case `pwd` in
+ *\ * | *\ *)
+ AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;;
+esac
+])
+
+
+# LT_INIT([OPTIONS])
+# ------------------
+AC_DEFUN([LT_INIT],
+[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT
+AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_BEFORE([$0], [LT_LANG])dnl
+AC_BEFORE([$0], [LT_OUTPUT])dnl
+AC_BEFORE([$0], [LTDL_INIT])dnl
+m4_require([_LT_CHECK_BUILDDIR])dnl
+
+dnl Autoconf doesn't catch unexpanded LT_ macros by default:
+m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl
+m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl
+dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4
+dnl unless we require an AC_DEFUNed macro:
+AC_REQUIRE([LTOPTIONS_VERSION])dnl
+AC_REQUIRE([LTSUGAR_VERSION])dnl
+AC_REQUIRE([LTVERSION_VERSION])dnl
+AC_REQUIRE([LTOBSOLETE_VERSION])dnl
+m4_require([_LT_PROG_LTMAIN])dnl
+
+_LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}])
+
+dnl Parse OPTIONS
+_LT_SET_OPTIONS([$0], [$1])
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS="$ltmain"
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+AC_SUBST(LIBTOOL)dnl
+
+_LT_SETUP
+
+# Only expand once:
+m4_define([LT_INIT])
+])# LT_INIT
+
+# Old names:
+AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT])
+AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PROG_LIBTOOL], [])
+dnl AC_DEFUN([AM_PROG_LIBTOOL], [])
+
+
+# _LT_CC_BASENAME(CC)
+# -------------------
+# Calculate cc_basename. Skip known compiler wrappers and cross-prefix.
+m4_defun([_LT_CC_BASENAME],
+[for cc_temp in $1""; do
+ case $cc_temp in
+ compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;;
+ distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;;
+ \-*) ;;
+ *) break;;
+ esac
+done
+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+])
+
+
+# _LT_FILEUTILS_DEFAULTS
+# ----------------------
+# It is okay to use these file commands and assume they have been set
+# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'.
+m4_defun([_LT_FILEUTILS_DEFAULTS],
+[: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+])# _LT_FILEUTILS_DEFAULTS
+
+
+# _LT_SETUP
+# ---------
+m4_defun([_LT_SETUP],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+
+_LT_DECL([], [host_alias], [0], [The host system])dnl
+_LT_DECL([], [host], [0])dnl
+_LT_DECL([], [host_os], [0])dnl
+dnl
+_LT_DECL([], [build_alias], [0], [The build system])dnl
+_LT_DECL([], [build], [0])dnl
+_LT_DECL([], [build_os], [0])dnl
+dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+dnl
+AC_REQUIRE([AC_PROG_LN_S])dnl
+test -z "$LN_S" && LN_S="ln -s"
+_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl
+dnl
+AC_REQUIRE([LT_CMD_MAX_LEN])dnl
+_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl
+_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
+dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl
+m4_require([_LT_CMD_RELOAD])dnl
+m4_require([_LT_CHECK_MAGIC_METHOD])dnl
+m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl
+m4_require([_LT_CMD_OLD_ARCHIVE])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_WITH_SYSROOT])dnl
+
+_LT_CONFIG_LIBTOOL_INIT([
+# See if we are running on zsh, and set the options which allow our
+# commands through without removal of \ escapes (INIT copy).
+if test -n "\${ZSH_VERSION+set}" ; then
+ setopt NO_GLOB_SUBST
+fi
+])
+if test -n "${ZSH_VERSION+set}" ; then
+ setopt NO_GLOB_SUBST
+fi
+
+_LT_CHECK_OBJDIR
+
+m4_require([_LT_TAG_COMPILER])dnl
+
+case $host_os in
+aix3*)
+ # AIX sometimes has problems with the GCC collect2 program. For some
+ # reason, if we set the COLLECT_NAMES environment variable, the problems
+ # vanish in a puff of smoke.
+ if test "X${COLLECT_NAMES+set}" != Xset; then
+ COLLECT_NAMES=
+ export COLLECT_NAMES
+ fi
+ ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a `.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld="$lt_cv_prog_gnu_ld"
+
+old_CC="$CC"
+old_CFLAGS="$CFLAGS"
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+_LT_CC_BASENAME([$compiler])
+
+# Only perform the check for file, if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+ if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+ _LT_PATH_MAGIC
+ fi
+ ;;
+esac
+
+# Use C for the default configuration in the libtool script
+LT_SUPPORTED_TAG([CC])
+_LT_LANG_C_CONFIG
+_LT_LANG_DEFAULT_CONFIG
+_LT_CONFIG_COMMANDS
+])# _LT_SETUP
+
+
+# _LT_PREPARE_SED_QUOTE_VARS
+# --------------------------
+# Define a few sed substitutions that help us do robust quoting.
+m4_defun([_LT_PREPARE_SED_QUOTE_VARS],
+[# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\([["`$\\]]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\([["`\\]]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+])
+
+# _LT_PROG_LTMAIN
+# ---------------
+# Note that this code is called both from `configure', and `config.status'
+# now that we use AC_CONFIG_COMMANDS to generate libtool. Notably,
+# `config.status' has no value for ac_aux_dir unless we are using Automake,
+# so we pass a copy along to make sure it has a sensible value anyway.
+m4_defun([_LT_PROG_LTMAIN],
+[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl
+_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir'])
+ltmain="$ac_aux_dir/ltmain.sh"
+])# _LT_PROG_LTMAIN
+
+
+## ------------------------------------- ##
+## Accumulate code for creating libtool. ##
+## ------------------------------------- ##
+
+# So that we can recreate a full libtool script including additional
+# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS
+# in macros and then make a single call at the end using the `libtool'
+# label.
+
+
+# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS])
+# ----------------------------------------
+# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL_INIT],
+[m4_ifval([$1],
+ [m4_append([_LT_OUTPUT_LIBTOOL_INIT],
+ [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_INIT])
+
+
+# _LT_CONFIG_LIBTOOL([COMMANDS])
+# ------------------------------
+# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL],
+[m4_ifval([$1],
+ [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS],
+ [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS])
+
+
+# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS])
+# -----------------------------------------------------
+m4_defun([_LT_CONFIG_SAVE_COMMANDS],
+[_LT_CONFIG_LIBTOOL([$1])
+_LT_CONFIG_LIBTOOL_INIT([$2])
+])
+
+
+# _LT_FORMAT_COMMENT([COMMENT])
+# -----------------------------
+# Add leading comment marks to the start of each line, and a trailing
+# full-stop to the whole comment if one is not present already.
+m4_define([_LT_FORMAT_COMMENT],
+[m4_ifval([$1], [
+m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])],
+ [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.])
+)])
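+
+# Illustrative example (editor's sketch, not from upstream libtool.m4):
+# given the description used elsewhere in this file,
+#
+#   _LT_FORMAT_COMMENT([A symbol stripping program])
+#
+# should expand to a comment line of the form "# A symbol stripping
+# program." -- a leading comment mark plus a trailing full stop.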
+
+
+
+## ------------------------ ##
+## FIXME: Eliminate VARNAME ##
+## ------------------------ ##
+
+
+# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?])
+# -------------------------------------------------------------------
+# CONFIGNAME is the name given to the value in the libtool script.
+# VARNAME is the (base) name used in the configure script.
+# VALUE may be 0, 1 or 2 for a computed quote-escaped value based on
+# VARNAME. Any other value will be used directly.
+m4_define([_LT_DECL],
+[lt_if_append_uniq([lt_decl_varnames], [$2], [, ],
+ [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name],
+ [m4_ifval([$1], [$1], [$2])])
+ lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3])
+ m4_ifval([$4],
+ [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])])
+ lt_dict_add_subkey([lt_decl_dict], [$2],
+ [tagged?], [m4_ifval([$5], [yes], [no])])])
+])
+
+
+# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION])
+# --------------------------------------------------------
+m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])])
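+
+# Illustrative usage (editor's sketch, not from upstream libtool.m4):
+# a declaration such as the one used later in this file,
+#
+#   _LT_DECL([], [STRIP], [1], [A symbol stripping program])
+#
+# appends STRIP to lt_decl_varnames and records its libtool_name, value
+# and description subkeys in lt_decl_dict, so the LIBTOOL CONFIG section
+# of the generated script can later emit a commented STRIP=... setting.
+# _LT_TAGDECL does the same but additionally marks the variable as tagged.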
+
+
+# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_tag_varnames],
+[_lt_decl_filter([tagged?], [yes], $@)])
+
+
+# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..])
+# ---------------------------------------------------------
+m4_define([_lt_decl_filter],
+[m4_case([$#],
+ [0], [m4_fatal([$0: too few arguments: $#])],
+ [1], [m4_fatal([$0: too few arguments: $#: $1])],
+ [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)],
+ [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)],
+ [lt_dict_filter([lt_decl_dict], $@)])[]dnl
+])
+
+
+# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...])
+# --------------------------------------------------
+m4_define([lt_decl_quote_varnames],
+[_lt_decl_filter([value], [1], $@)])
+
+
+# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_dquote_varnames],
+[_lt_decl_filter([value], [2], $@)])
+
+
+# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_varnames_tagged],
+[m4_assert([$# <= 2])dnl
+_$0(m4_quote(m4_default([$1], [[, ]])),
+ m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]),
+ m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))])
+m4_define([_lt_decl_varnames_tagged],
+[m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])])
+
+
+# lt_decl_all_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_all_varnames],
+[_$0(m4_quote(m4_default([$1], [[, ]])),
+ m4_if([$2], [],
+ m4_quote(lt_decl_varnames),
+ m4_quote(m4_shift($@))))[]dnl
+])
+m4_define([_lt_decl_all_varnames],
+[lt_join($@, lt_decl_varnames_tagged([$1],
+ lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl
+])
+
+
+# _LT_CONFIG_STATUS_DECLARE([VARNAME])
+# ------------------------------------
+# Quote a variable value, and forward it to `config.status' so that its
+# declaration there will have the same value as in `configure'. VARNAME
+# must have a single quote delimited value for this to work.
+m4_define([_LT_CONFIG_STATUS_DECLARE],
+[$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`'])
+
+
+# _LT_CONFIG_STATUS_DECLARATIONS
+# ------------------------------
+# We delimit libtool config variables with single quotes, so when
+# we write them to config.status, we have to be sure to quote all
+# embedded single quotes properly. In configure, this macro expands
+# each variable declared with _LT_DECL (and _LT_TAGDECL) into:
+#
+# <var>='`$ECHO "$<var>" | $SED "$delay_single_quote_subst"`'
+m4_defun([_LT_CONFIG_STATUS_DECLARATIONS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames),
+ [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAGS
+# ----------------
+# Output comment and list of tags supported by the script
+m4_defun([_LT_LIBTOOL_TAGS],
+[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl
+available_tags="_LT_TAGS"dnl
+])
+
+
+# _LT_LIBTOOL_DECLARE(VARNAME, [TAG])
+# -----------------------------------
+# Extract the dictionary values for VARNAME (optionally with TAG) and
+# expand to a commented shell variable setting:
+#
+# # Some comment about what VAR is for.
+# visible_name=$lt_internal_name
+m4_define([_LT_LIBTOOL_DECLARE],
+[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1],
+ [description])))[]dnl
+m4_pushdef([_libtool_name],
+ m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl
+m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])),
+ [0], [_libtool_name=[$]$1],
+ [1], [_libtool_name=$lt_[]$1],
+ [2], [_libtool_name=$lt_[]$1],
+ [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl
+m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl
+])
+
+
+# _LT_LIBTOOL_CONFIG_VARS
+# -----------------------
+# Produce commented declarations of non-tagged libtool config variables
+# suitable for insertion in the LIBTOOL CONFIG section of the `libtool'
+# script. Tagged libtool config variables (even for the LIBTOOL CONFIG
+# section) are produced by _LT_LIBTOOL_TAG_VARS.
+m4_defun([_LT_LIBTOOL_CONFIG_VARS],
+[m4_foreach([_lt_var],
+ m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)),
+ [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAG_VARS(TAG)
+# -------------------------
+m4_define([_LT_LIBTOOL_TAG_VARS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames),
+ [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])])
+
+
+# _LT_TAGVAR(VARNAME, [TAGNAME])
+# ------------------------------
+m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])])
+
+
+# _LT_CONFIG_COMMANDS
+# -------------------
+# Send accumulated output to $CONFIG_STATUS. Thanks to the lists of
+# variables for single and double quote escaping we saved from calls
+# to _LT_DECL, we can put quote escaped variables declarations
+# into `config.status', and then the shell code to quote escape them in
+# for loops in `config.status'. Finally, any additional code accumulated
+# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded.
+m4_defun([_LT_CONFIG_COMMANDS],
+[AC_PROVIDE_IFELSE([LT_OUTPUT],
+ dnl If the libtool generation code has been placed in $CONFIG_LT,
+ dnl instead of duplicating it all over again into config.status,
+ dnl then we will have config.status run $CONFIG_LT later, so it
+ dnl needs to know what name is stored there:
+ [AC_CONFIG_COMMANDS([libtool],
+ [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])],
+ dnl If the libtool generation code is destined for config.status,
+ dnl expand the accumulated commands and init code now:
+ [AC_CONFIG_COMMANDS([libtool],
+ [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])])
+])#_LT_CONFIG_COMMANDS
+
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT],
+[
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+_LT_CONFIG_STATUS_DECLARATIONS
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+ eval 'cat <<_LTECHO_EOF
+\$[]1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_quote_varnames); do
+ case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+ *[[\\\\\\\`\\"\\\$]]*)
+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
+ ;;
+ *)
+ eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+ ;;
+ esac
+done
+
+# Double-quote double-evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_dquote_varnames); do
+ case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+ *[[\\\\\\\`\\"\\\$]]*)
+ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+ ;;
+ *)
+ eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+ ;;
+ esac
+done
+
+_LT_OUTPUT_LIBTOOL_INIT
+])
+
+# _LT_GENERATED_FILE_INIT(FILE, [COMMENT])
+# ----------------------------------------
+# Generate a child script FILE with all initialization necessary to
+# reuse the environment learned by the parent script, and make the
+# file executable. If COMMENT is supplied, it is inserted after the
+# `#!' sequence but before initialization text begins. After this
+# macro, additional text can be appended to FILE to form the body of
+# the child script. The macro ends with non-zero status if the
+# file could not be fully written (such as if the disk is full).
+m4_ifdef([AS_INIT_GENERATED],
+[m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])],
+[m4_defun([_LT_GENERATED_FILE_INIT],
+[m4_require([AS_PREPARE])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[lt_write_fail=0
+cat >$1 <<_ASEOF || lt_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+$2
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$1 <<\_ASEOF || lt_write_fail=1
+AS_SHELL_SANITIZE
+_AS_PREPARE
+exec AS_MESSAGE_FD>&1
+_ASEOF
+test $lt_write_fail = 0 && chmod +x $1[]dnl
+m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT
+
+# LT_OUTPUT
+# ---------
+# This macro allows early generation of the libtool script (before
+# AC_OUTPUT is called), in case it is used in configure for compilation
+# tests.
+AC_DEFUN([LT_OUTPUT],
+[: ${CONFIG_LT=./config.lt}
+AC_MSG_NOTICE([creating $CONFIG_LT])
+_LT_GENERATED_FILE_INIT(["$CONFIG_LT"],
+[# Run this file to recreate a libtool stub with the current configuration.])
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+lt_cl_silent=false
+exec AS_MESSAGE_LOG_FD>>config.log
+{
+ echo
+ AS_BOX([Running $as_me.])
+} >&AS_MESSAGE_LOG_FD
+
+lt_cl_help="\
+\`$as_me' creates a local libtool stub from the current configuration,
+for use in further configure time tests before the real libtool is
+generated.
+
+Usage: $[0] [[OPTIONS]]
+
+ -h, --help print this help, then exit
+ -V, --version print version number, then exit
+ -q, --quiet do not print progress messages
+ -d, --debug don't remove temporary files
+
+Report bugs to <bug-libtool@gnu.org>."
+
+lt_cl_version="\
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+configured by $[0], generated by m4_PACKAGE_STRING.
+
+Copyright (C) 2010 Free Software Foundation, Inc.
+This config.lt script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+while test $[#] != 0
+do
+ case $[1] in
+ --version | --v* | -V )
+ echo "$lt_cl_version"; exit 0 ;;
+ --help | --h* | -h )
+ echo "$lt_cl_help"; exit 0 ;;
+ --debug | --d* | -d )
+ debug=: ;;
+ --quiet | --q* | --silent | --s* | -q )
+ lt_cl_silent=: ;;
+
+ -*) AC_MSG_ERROR([unrecognized option: $[1]
+Try \`$[0] --help' for more information.]) ;;
+
+ *) AC_MSG_ERROR([unrecognized argument: $[1]
+Try \`$[0] --help' for more information.]) ;;
+ esac
+ shift
+done
+
+if $lt_cl_silent; then
+ exec AS_MESSAGE_FD>/dev/null
+fi
+_LTEOF
+
+cat >>"$CONFIG_LT" <<_LTEOF
+_LT_OUTPUT_LIBTOOL_COMMANDS_INIT
+_LTEOF
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+AC_MSG_NOTICE([creating $ofile])
+_LT_OUTPUT_LIBTOOL_COMMANDS
+AS_EXIT(0)
+_LTEOF
+chmod +x "$CONFIG_LT"
+
+# configure is writing to config.log, but config.lt does its own redirection,
+# appending to config.log, which fails on DOS, as config.log is still kept
+# open by configure. Here we exec the FD to /dev/null, effectively closing
+# config.log, so it can be properly (re)opened and appended to by config.lt.
+lt_cl_success=:
+test "$silent" = yes &&
+ lt_config_lt_args="$lt_config_lt_args --quiet"
+exec AS_MESSAGE_LOG_FD>/dev/null
+$SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false
+exec AS_MESSAGE_LOG_FD>>config.log
+$lt_cl_success || AS_EXIT(1)
+])# LT_OUTPUT
+
+
+# _LT_CONFIG(TAG)
+# ---------------
+# If TAG is the built-in tag, create an initial libtool script with a
+# default configuration from the untagged config vars. Otherwise add code
+# to config.status for appending the configuration named by TAG from the
+# matching tagged config vars.
+m4_defun([_LT_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_CONFIG_SAVE_COMMANDS([
+ m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl
+ m4_if(_LT_TAG, [C], [
+ # See if we are running on zsh, and set the options which allow our
+ # commands through without removal of \ escapes.
+ if test -n "${ZSH_VERSION+set}" ; then
+ setopt NO_GLOB_SUBST
+ fi
+
+ cfgfile="${ofile}T"
+ trap "$RM \"$cfgfile\"; exit 1" 1 2 15
+ $RM "$cfgfile"
+
+ cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+
+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+_LT_COPYING
+_LT_LIBTOOL_TAGS
+
+# ### BEGIN LIBTOOL CONFIG
+_LT_LIBTOOL_CONFIG_VARS
+_LT_LIBTOOL_TAG_VARS
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+ case $host_os in
+ aix3*)
+ cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program. For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test "X${COLLECT_NAMES+set}" != Xset; then
+ COLLECT_NAMES=
+ export COLLECT_NAMES
+fi
+_LT_EOF
+ ;;
+ esac
+
+ _LT_PROG_LTMAIN
+
+ # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines. Since sed operates in
+ # text mode, it properly converts lines to CR/LF. This bash problem
+ # is reportedly fixed, but why not run on old versions too?
+ sed '$q' "$ltmain" >> "$cfgfile" \
+ || (rm -f "$cfgfile"; exit 1)
+
+ _LT_PROG_REPLACE_SHELLFNS
+
+ mv -f "$cfgfile" "$ofile" ||
+ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+ chmod +x "$ofile"
+],
+[cat <<_LT_EOF >> "$ofile"
+
+dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded
+dnl in a comment (i.e. after a #).
+# ### BEGIN LIBTOOL TAG CONFIG: $1
+_LT_LIBTOOL_TAG_VARS(_LT_TAG)
+# ### END LIBTOOL TAG CONFIG: $1
+_LT_EOF
+])dnl /m4_if
+],
+[m4_if([$1], [], [
+ PACKAGE='$PACKAGE'
+ VERSION='$VERSION'
+ TIMESTAMP='$TIMESTAMP'
+ RM='$RM'
+ ofile='$ofile'], [])
+])dnl /_LT_CONFIG_SAVE_COMMANDS
+])# _LT_CONFIG
+
+
+# LT_SUPPORTED_TAG(TAG)
+# ---------------------
+# Trace this macro to discover what tags are supported by the libtool
+# --tag option, using:
+# autoconf --trace 'LT_SUPPORTED_TAG:$1'
+AC_DEFUN([LT_SUPPORTED_TAG], [])
+
+
+# C support is built-in for now
+m4_define([_LT_LANG_C_enabled], [])
+m4_define([_LT_TAGS], [])
+
+
+# LT_LANG(LANG)
+# -------------
+# Enable libtool support for the given language if not already enabled.
+AC_DEFUN([LT_LANG],
+[AC_BEFORE([$0], [LT_OUTPUT])dnl
+m4_case([$1],
+ [C], [_LT_LANG(C)],
+ [C++], [_LT_LANG(CXX)],
+ [Java], [_LT_LANG(GCJ)],
+ [Fortran 77], [_LT_LANG(F77)],
+ [Fortran], [_LT_LANG(FC)],
+ [Windows Resource], [_LT_LANG(RC)],
+ [m4_ifdef([_LT_LANG_]$1[_CONFIG],
+ [_LT_LANG($1)],
+ [m4_fatal([$0: unsupported language: "$1"])])])dnl
+])# LT_LANG
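+
+# Illustrative usage (editor's sketch, not from upstream libtool.m4):
+# a configure.ac that builds C++ and Fortran 77 libraries in addition to
+# the built-in C support would call, after LT_INIT:
+#
+#   LT_LANG([C++])
+#   LT_LANG([Fortran 77])
+#
+# Each call maps the language name onto its tag (CXX, F77, ...) and runs
+# the matching _LT_LANG_<TAG>_CONFIG macro once via _LT_LANG below.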
+
+
+# _LT_LANG(LANGNAME)
+# ------------------
+m4_defun([_LT_LANG],
+[m4_ifdef([_LT_LANG_]$1[_enabled], [],
+ [LT_SUPPORTED_TAG([$1])dnl
+ m4_append([_LT_TAGS], [$1 ])dnl
+ m4_define([_LT_LANG_]$1[_enabled], [])dnl
+ _LT_LANG_$1_CONFIG($1)])dnl
+])# _LT_LANG
+
+
+# _LT_LANG_DEFAULT_CONFIG
+# -----------------------
+m4_defun([_LT_LANG_DEFAULT_CONFIG],
+[AC_PROVIDE_IFELSE([AC_PROG_CXX],
+ [LT_LANG(CXX)],
+ [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_F77],
+ [LT_LANG(F77)],
+ [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_FC],
+ [LT_LANG(FC)],
+ [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])])
+
+dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal
+dnl pulling things in needlessly.
+AC_PROVIDE_IFELSE([AC_PROG_GCJ],
+ [LT_LANG(GCJ)],
+ [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],
+ [LT_LANG(GCJ)],
+ [AC_PROVIDE_IFELSE([LT_PROG_GCJ],
+ [LT_LANG(GCJ)],
+ [m4_ifdef([AC_PROG_GCJ],
+ [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])])
+ m4_ifdef([A][M_PROG_GCJ],
+ [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])])
+ m4_ifdef([LT_PROG_GCJ],
+ [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])])
+
+AC_PROVIDE_IFELSE([LT_PROG_RC],
+ [LT_LANG(RC)],
+ [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])])
+])# _LT_LANG_DEFAULT_CONFIG
+
+# Obsolete macros:
+AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)])
+AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)])
+AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)])
+AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)])
+AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_CXX], [])
+dnl AC_DEFUN([AC_LIBTOOL_F77], [])
+dnl AC_DEFUN([AC_LIBTOOL_FC], [])
+dnl AC_DEFUN([AC_LIBTOOL_GCJ], [])
+dnl AC_DEFUN([AC_LIBTOOL_RC], [])
+
+
+# _LT_TAG_COMPILER
+# ----------------
+m4_defun([_LT_TAG_COMPILER],
+[AC_REQUIRE([AC_PROG_CC])dnl
+
+_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl
+_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl
+_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl
+_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+])# _LT_TAG_COMPILER
+
+
+# _LT_COMPILER_BOILERPLATE
+# ------------------------
+# Check for compiler boilerplate output or warnings with
+# the simple compiler test code.
+m4_defun([_LT_COMPILER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM conftest*
+])# _LT_COMPILER_BOILERPLATE
+
+
+# _LT_LINKER_BOILERPLATE
+# ----------------------
+# Check for linker boilerplate output or warnings with
+# the simple link test code.
+m4_defun([_LT_LINKER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+])# _LT_LINKER_BOILERPLATE
+
+# _LT_REQUIRED_DARWIN_CHECKS
+# -------------------------
+m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[
+ case $host_os in
+ rhapsody* | darwin*)
+ AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:])
+ AC_CHECK_TOOL([NMEDIT], [nmedit], [:])
+ AC_CHECK_TOOL([LIPO], [lipo], [:])
+ AC_CHECK_TOOL([OTOOL], [otool], [:])
+ AC_CHECK_TOOL([OTOOL64], [otool64], [:])
+ _LT_DECL([], [DSYMUTIL], [1],
+ [Tool to manipulate archived DWARF debug symbol files on Mac OS X])
+ _LT_DECL([], [NMEDIT], [1],
+ [Tool to change global to local symbols on Mac OS X])
+ _LT_DECL([], [LIPO], [1],
+ [Tool to manipulate fat objects and archives on Mac OS X])
+ _LT_DECL([], [OTOOL], [1],
+ [ldd/readelf like tool for Mach-O binaries on Mac OS X])
+ _LT_DECL([], [OTOOL64], [1],
+ [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4])
+
+ AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod],
+ [lt_cv_apple_cc_single_mod=no
+ if test -z "${LT_MULTI_MODULE}"; then
+ # By default we will add the -single_module flag. You can override
+ # by either setting the environment variable LT_MULTI_MODULE
+ # non-empty at configure time, or by adding -multi_module to the
+ # link flags.
+ rm -rf libconftest.dylib*
+ echo "int foo(void){return 1;}" > conftest.c
+ echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD
+ $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+ _lt_result=$?
+ if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then
+ lt_cv_apple_cc_single_mod=yes
+ else
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ fi
+ rm -rf libconftest.dylib*
+ rm -f conftest.*
+ fi])
+ AC_CACHE_CHECK([for -exported_symbols_list linker flag],
+ [lt_cv_ld_exported_symbols_list],
+ [lt_cv_ld_exported_symbols_list=no
+ save_LDFLAGS=$LDFLAGS
+ echo "_main" > conftest.sym
+ LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+ AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+ [lt_cv_ld_exported_symbols_list=yes],
+ [lt_cv_ld_exported_symbols_list=no])
+ LDFLAGS="$save_LDFLAGS"
+ ])
+ AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load],
+ [lt_cv_ld_force_load=no
+ cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+ echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD
+ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
+ echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
+ $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
+ echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
+ $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
+ cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+ echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD
+ $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+ _lt_result=$?
+ if test -f conftest && test ! -s conftest.err && test $_lt_result = 0 && $GREP forced_load conftest 2>&1 >/dev/null; then
+ lt_cv_ld_force_load=yes
+ else
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ fi
+ rm -f conftest.err libconftest.a conftest conftest.c
+ rm -rf conftest.dSYM
+ ])
+ case $host_os in
+ rhapsody* | darwin1.[[012]])
+ _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
+ darwin1.*)
+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+ darwin*) # darwin 5.x on
+      # If running on 10.5 or later, the deployment target defaults
+      # to the OS version; on x86 running 10.4, the deployment
+      # target defaults to 10.4. Don't you love it?
+ case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+ 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*)
+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+ 10.[[012]]*)
+ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+ 10.*)
+ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+ esac
+ ;;
+ esac
+ if test "$lt_cv_apple_cc_single_mod" = "yes"; then
+ _lt_dar_single_mod='$single_module'
+ fi
+ if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
+ _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
+ else
+ _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
+ fi
+ if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
+ _lt_dsymutil='~$DSYMUTIL $lib || :'
+ else
+ _lt_dsymutil=
+ fi
+ ;;
+ esac
+])
+
+
+# _LT_DARWIN_LINKER_FEATURES
+# --------------------------
+# Checks for linker and compiler features on darwin
+m4_defun([_LT_DARWIN_LINKER_FEATURES],
+[
+ m4_require([_LT_REQUIRED_DARWIN_CHECKS])
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_automatic, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+ if test "$lt_cv_ld_force_load" = "yes"; then
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+ else
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=''
+ fi
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined"
+ case $cc_basename in
+ ifort*) _lt_dar_can_shared=yes ;;
+ *) _lt_dar_can_shared=$GCC ;;
+ esac
+ if test "$_lt_dar_can_shared" = "yes"; then
+ output_verbose_link_cmd=func_echo_all
+ _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
+ _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
+ _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
+ _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
+ m4_if([$1], [CXX],
+[ if test "$lt_cv_apple_cc_single_mod" != "yes"; then
+ _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}"
+ _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}"
+ fi
+],[])
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+])
+
+# _LT_SYS_MODULE_PATH_AIX([TAGNAME])
+# ----------------------------------
+# Links a minimal program and checks the executable
+# for the system default hardcoded library path. In most cases,
+# this is /usr/lib:/lib, but when the MPI compilers are used
+# the locations of the communication and MPI libs are included too.
+# If we don't find anything, use the default library path according
+# to the AIX ld manual.
+# Store the results from the different compilers for each TAGNAME.
+# Allow overriding them for all tags through lt_cv_aix_libpath.
+m4_defun([_LT_SYS_MODULE_PATH_AIX],
+[m4_require([_LT_DECL_SED])dnl
+if test "${lt_cv_aix_libpath+set}" = set; then
+ aix_libpath=$lt_cv_aix_libpath
+else
+ AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])],
+ [AC_LINK_IFELSE([AC_LANG_PROGRAM],[
+ lt_aix_libpath_sed='[
+ /Import File Strings/,/^$/ {
+ /^0/ {
+ s/^0 *\([^ ]*\) *$/\1/
+ p
+ }
+ }]'
+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+ # Check for a 64-bit object if we didn't find anything.
+ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+ fi],[])
+ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib"
+ fi
+ ])
+ aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])
+fi
+])# _LT_SYS_MODULE_PATH_AIX
+
+
+# _LT_SHELL_INIT(ARG)
+# -------------------
+m4_define([_LT_SHELL_INIT],
+[m4_divert_text([M4SH-INIT], [$1
+])])# _LT_SHELL_INIT
+
+
+
+# _LT_PROG_ECHO_BACKSLASH
+# -----------------------
+# Find how we can fake an echo command that does not interpret backslash.
+# In particular, with Autoconf 2.60 or later we add some code to the start
+# of the generated configure script which will find a shell with a builtin
+# printf (which we can use as an echo command).
+m4_defun([_LT_PROG_ECHO_BACKSLASH],
+[ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+AC_MSG_CHECKING([how to print strings])
+# Test print first, because it will be a builtin if present.
+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+ ECHO='printf %s\n'
+else
+ # Use this function as a fallback that always works.
+ func_fallback_echo ()
+ {
+ eval 'cat <<_LTECHO_EOF
+$[]1
+_LTECHO_EOF'
+ }
+ ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+ $ECHO "$*"
+}
+
+case "$ECHO" in
+ printf*) AC_MSG_RESULT([printf]) ;;
+ print*) AC_MSG_RESULT([print -r]) ;;
+ *) AC_MSG_RESULT([cat]) ;;
+esac
+
+m4_ifdef([_AS_DETECT_SUGGESTED],
+[_AS_DETECT_SUGGESTED([
+ test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || (
+ ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+ PATH=/empty FPATH=/empty; export PATH FPATH
+ test "X`printf %s $ECHO`" = "X$ECHO" \
+ || test "X`print -r -- $ECHO`" = "X$ECHO" )])])
+
+_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
+_LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
+])# _LT_PROG_ECHO_BACKSLASH
+
+
+# _LT_WITH_SYSROOT
+# ----------------
+AC_DEFUN([_LT_WITH_SYSROOT],
+[AC_MSG_CHECKING([for sysroot])
+AC_ARG_WITH([sysroot],
+[ --with-sysroot[=DIR] Search for dependent libraries within DIR
+ (or the compiler's sysroot if not specified).],
+[], [with_sysroot=no])
+
+dnl lt_sysroot will always be passed unquoted. We quote it here
+dnl in case the user passed a directory name.
+lt_sysroot=
+case ${with_sysroot} in #(
+ yes)
+ if test "$GCC" = yes; then
+ lt_sysroot=`$CC --print-sysroot 2>/dev/null`
+ fi
+ ;; #(
+ /*)
+ lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
+ ;; #(
+ no|'')
+ ;; #(
+ *)
+ AC_MSG_RESULT([${with_sysroot}])
+ AC_MSG_ERROR([The sysroot must be an absolute path.])
+ ;;
+esac
+
+ AC_MSG_RESULT([${lt_sysroot:-no}])
+_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl
+[dependent libraries, and in which our libraries should be installed.])])
+
+# _LT_ENABLE_LOCK
+# ---------------
+m4_defun([_LT_ENABLE_LOCK],
+[AC_ARG_ENABLE([libtool-lock],
+ [AS_HELP_STRING([--disable-libtool-lock],
+ [avoid locking (might break parallel builds)])])
+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+ # Find out which ABI we are using.
+ echo 'int i;' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ case `/usr/bin/file conftest.$ac_objext` in
+ *ELF-32*)
+ HPUX_IA64_MODE="32"
+ ;;
+ *ELF-64*)
+ HPUX_IA64_MODE="64"
+ ;;
+ esac
+ fi
+ rm -rf conftest*
+ ;;
+*-*-irix6*)
+ # Find out which ABI we are using.
+ echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ if test "$lt_cv_prog_gnu_ld" = yes; then
+ case `/usr/bin/file conftest.$ac_objext` in
+ *32-bit*)
+ LD="${LD-ld} -melf32bsmip"
+ ;;
+ *N32*)
+ LD="${LD-ld} -melf32bmipn32"
+ ;;
+ *64-bit*)
+ LD="${LD-ld} -melf64bmip"
+ ;;
+ esac
+ else
+ case `/usr/bin/file conftest.$ac_objext` in
+ *32-bit*)
+ LD="${LD-ld} -32"
+ ;;
+ *N32*)
+ LD="${LD-ld} -n32"
+ ;;
+ *64-bit*)
+ LD="${LD-ld} -64"
+ ;;
+ esac
+ fi
+ fi
+ rm -rf conftest*
+ ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+ # Find out which ABI we are using.
+ echo 'int i;' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ case `/usr/bin/file conftest.o` in
+ *32-bit*)
+ case $host in
+ x86_64-*kfreebsd*-gnu)
+ LD="${LD-ld} -m elf_i386_fbsd"
+ ;;
+ x86_64-*linux*)
+ LD="${LD-ld} -m elf_i386"
+ ;;
+ ppc64-*linux*|powerpc64-*linux*)
+ LD="${LD-ld} -m elf32ppclinux"
+ ;;
+ s390x-*linux*)
+ LD="${LD-ld} -m elf_s390"
+ ;;
+ sparc64-*linux*)
+ LD="${LD-ld} -m elf32_sparc"
+ ;;
+ esac
+ ;;
+ *64-bit*)
+ case $host in
+ x86_64-*kfreebsd*-gnu)
+ LD="${LD-ld} -m elf_x86_64_fbsd"
+ ;;
+ x86_64-*linux*)
+ LD="${LD-ld} -m elf_x86_64"
+ ;;
+ ppc*-*linux*|powerpc*-*linux*)
+ LD="${LD-ld} -m elf64ppc"
+ ;;
+ s390*-*linux*|s390*-*tpf*)
+ LD="${LD-ld} -m elf64_s390"
+ ;;
+ sparc*-*linux*)
+ LD="${LD-ld} -m elf64_sparc"
+ ;;
+ esac
+ ;;
+ esac
+ fi
+ rm -rf conftest*
+ ;;
+
+*-*-sco3.2v5*)
+ # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+ SAVE_CFLAGS="$CFLAGS"
+ CFLAGS="$CFLAGS -belf"
+ AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf,
+ [AC_LANG_PUSH(C)
+ AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no])
+ AC_LANG_POP])
+ if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+ # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+ CFLAGS="$SAVE_CFLAGS"
+ fi
+ ;;
+sparc*-*solaris*)
+ # Find out which ABI we are using.
+ echo 'int i;' > conftest.$ac_ext
+ if AC_TRY_EVAL(ac_compile); then
+ case `/usr/bin/file conftest.o` in
+ *64-bit*)
+ case $lt_cv_prog_gnu_ld in
+ yes*) LD="${LD-ld} -m elf64_sparc" ;;
+ *)
+ if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+ LD="${LD-ld} -64"
+ fi
+ ;;
+ esac
+ ;;
+ esac
+ fi
+ rm -rf conftest*
+ ;;
+esac
+
+need_locks="$enable_libtool_lock"
+])# _LT_ENABLE_LOCK
+
+
+# _LT_PROG_AR
+# -----------
+m4_defun([_LT_PROG_AR],
+[AC_CHECK_TOOLS(AR, [ar], false)
+: ${AR=ar}
+: ${AR_FLAGS=cru}
+_LT_DECL([], [AR], [1], [The archiver])
+_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive])
+
+AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file],
+ [lt_cv_ar_at_file=no
+ AC_COMPILE_IFELSE([AC_LANG_PROGRAM],
+ [echo conftest.$ac_objext > conftest.lst
+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD'
+ AC_TRY_EVAL([lt_ar_try])
+ if test "$ac_status" -eq 0; then
+ # Ensure the archiver fails upon bogus file names.
+ rm -f conftest.$ac_objext libconftest.a
+ AC_TRY_EVAL([lt_ar_try])
+ if test "$ac_status" -ne 0; then
+ lt_cv_ar_at_file=@
+ fi
+ fi
+ rm -f conftest.* libconftest.a
+ ])
+ ])
+
+if test "x$lt_cv_ar_at_file" = xno; then
+ archiver_list_spec=
+else
+ archiver_list_spec=$lt_cv_ar_at_file
+fi
+_LT_DECL([], [archiver_list_spec], [1],
+ [How to feed a file listing to the archiver])
+])# _LT_PROG_AR
+
+
+# _LT_CMD_OLD_ARCHIVE
+# -------------------
+m4_defun([_LT_CMD_OLD_ARCHIVE],
+[_LT_PROG_AR
+
+AC_CHECK_TOOL(STRIP, strip, :)
+test -z "$STRIP" && STRIP=:
+_LT_DECL([], [STRIP], [1], [A symbol stripping program])
+
+AC_CHECK_TOOL(RANLIB, ranlib, :)
+test -z "$RANLIB" && RANLIB=:
+_LT_DECL([], [RANLIB], [1],
+ [Commands used to install an old-style archive])
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+ case $host_os in
+ openbsd*)
+ old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib"
+ ;;
+ *)
+ old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib"
+ ;;
+ esac
+ old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib"
+fi
+
+case $host_os in
+ darwin*)
+ lock_old_archive_extraction=yes ;;
+ *)
+ lock_old_archive_extraction=no ;;
+esac
+_LT_DECL([], [old_postinstall_cmds], [2])
+_LT_DECL([], [old_postuninstall_cmds], [2])
+_LT_TAGDECL([], [old_archive_cmds], [2],
+ [Commands used to build an old-style archive])
+_LT_DECL([], [lock_old_archive_extraction], [0],
+ [Whether to use a lock for old archive extraction])
+])# _LT_CMD_OLD_ARCHIVE
+
+
+# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+# [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------------------
+# Check whether the given compiler option works
+AC_DEFUN([_LT_COMPILER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+ [$2=no
+ m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4])
+ echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+ lt_compiler_flag="$3"
+ # Insert the option either (1) after the last *FLAGS variable, or
+ # (2) before a word containing "conftest.", or (3) at the end.
+ # Note that $ac_compile itself does not contain backslashes and begins
+ # with a dollar sign (not a hyphen), so the echo should work correctly.
+ # The option is referenced via a variable to avoid confusing sed.
+ lt_compile=`echo "$ac_compile" | $SED \
+ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+ -e 's:$: $lt_compiler_flag:'`
+ (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+ (eval "$lt_compile" 2>conftest.err)
+ ac_status=$?
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+ if (exit $ac_status) && test -s "$ac_outfile"; then
+ # The compiler can only warn and ignore the option if not recognized
+ # So say no if there are warnings other than the usual output.
+ $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+ $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+ if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+ $2=yes
+ fi
+ fi
+ $RM conftest*
+])
+
+if test x"[$]$2" = xyes; then
+ m4_if([$5], , :, [$5])
+else
+ m4_if([$6], , :, [$6])
+fi
+])# _LT_COMPILER_OPTION
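+
+# Illustrative usage (editor's sketch, simplified from the way the tag
+# configurations elsewhere in this file probe compiler flags):
+#
+#   _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
+#     [lt_cv_prog_compiler_rtti_exceptions],
+#     [-fno-rtti -fno-exceptions], [],
+#     [lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions"])
+#
+# i.e. MESSAGE, cache VARIABLE-NAME, the FLAGS under test, an optional
+# OUTPUT-FILE, and the success action run when the flag is accepted.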
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], [])
+
+
+# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+# [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------
+# Check whether the given linker option works
+AC_DEFUN([_LT_LINKER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+ [$2=no
+ save_LDFLAGS="$LDFLAGS"
+ LDFLAGS="$LDFLAGS $3"
+ echo "$lt_simple_link_test_code" > conftest.$ac_ext
+ if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+ # The linker can only warn and ignore the option if not recognized
+ # So say no if there are warnings
+ if test -s conftest.err; then
+ # Append any errors to the config.log.
+ cat conftest.err 1>&AS_MESSAGE_LOG_FD
+ $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+ $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+ if diff conftest.exp conftest.er2 >/dev/null; then
+ $2=yes
+ fi
+ else
+ $2=yes
+ fi
+ fi
+ $RM -r conftest*
+ LDFLAGS="$save_LDFLAGS"
+])
+
+if test x"[$]$2" = xyes; then
+ m4_if([$4], , :, [$4])
+else
+ m4_if([$5], , :, [$5])
+fi
+])# _LT_LINKER_OPTION
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], [])
+
+
+# LT_CMD_MAX_LEN
+# --------------
+AC_DEFUN([LT_CMD_MAX_LEN],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+# find the maximum length of command line arguments
+AC_MSG_CHECKING([the maximum length of command line arguments])
+AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl
+ i=0
+ teststring="ABCD"
+
+ case $build_os in
+ msdosdjgpp*)
+ # On DJGPP, this test can blow up pretty badly due to problems in libc
+ # (any single argument exceeding 2000 bytes causes a buffer overrun
+ # during glob expansion). Even if it were fixed, the result of this
+ # check would be larger than it should be.
+ lt_cv_sys_max_cmd_len=12288; # 12K is about right
+ ;;
+
+ gnu*)
+ # Under GNU Hurd, this test is not required because there is
+ # no limit to the length of command line arguments.
+ # Libtool will interpret -1 as no limit whatsoever
+ lt_cv_sys_max_cmd_len=-1;
+ ;;
+
+ cygwin* | mingw* | cegcc*)
+ # On Win9x/ME, this test blows up -- it succeeds, but takes
+ # about 5 minutes as the teststring grows exponentially.
+ # Worse, since 9x/ME are not pre-emptively multitasking,
+ # you end up with a "frozen" computer, even though with patience
+ # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum line length reported by
+ # all of the supported platforms: 8192 (on NT/2K/XP).
+ lt_cv_sys_max_cmd_len=8192;
+ ;;
+
+ mint*)
+ # On MiNT this can take a long time and run out of memory.
+ lt_cv_sys_max_cmd_len=8192;
+ ;;
+
+ amigaos*)
+ # On AmigaOS with pdksh, this test takes hours, literally.
+ # So we just punt and use a minimum line length of 8192.
+ lt_cv_sys_max_cmd_len=8192;
+ ;;
+
+ netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
+ # This has been around since 386BSD, at least. Likely further.
+ if test -x /sbin/sysctl; then
+ lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+ elif test -x /usr/sbin/sysctl; then
+ lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+ else
+ lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs
+ fi
+ # And add a safety zone
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+ ;;
+
+ interix*)
+ # We know the value 262144 and hardcode it with a safety zone (like BSD)
+ lt_cv_sys_max_cmd_len=196608
+ ;;
+
+ osf*)
+ # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+ # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
+    # nice to cause kernel panics, so let's avoid the loop below.
+ # First set a reasonable default.
+ lt_cv_sys_max_cmd_len=16384
+ #
+ if test -x /sbin/sysconfig; then
+ case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+ *1*) lt_cv_sys_max_cmd_len=-1 ;;
+ esac
+ fi
+ ;;
+ sco3.2v5*)
+ lt_cv_sys_max_cmd_len=102400
+ ;;
+ sysv5* | sco5v6* | sysv4.2uw2*)
+ kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+ if test -n "$kargmax"; then
+ lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'`
+ else
+ lt_cv_sys_max_cmd_len=32768
+ fi
+ ;;
+ *)
+ lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+ if test -n "$lt_cv_sys_max_cmd_len"; then
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+ else
+ # Make teststring a little bigger before we do anything with it.
+ # a 1K string should be a reasonable start.
+ for i in 1 2 3 4 5 6 7 8 ; do
+ teststring=$teststring$teststring
+ done
+ SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+ # If test is not a shell built-in, we'll probably end up computing a
+ # maximum length that is only half of the actual maximum length, but
+ # we can't tell.
+ while { test "X"`func_fallback_echo "$teststring$teststring" 2>/dev/null` \
+ = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+ test $i != 17 # 1/2 MB should be enough
+ do
+ i=`expr $i + 1`
+ teststring=$teststring$teststring
+ done
+ # Only check the string length outside the loop.
+ lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+ teststring=
+ # Add a significant safety factor because C++ compilers can tack on
+ # massive amounts of additional arguments before passing them to the
+ # linker. It appears as though 1/2 is a usable value.
+ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+ fi
+ ;;
+ esac
+])
+if test -n "$lt_cv_sys_max_cmd_len" ; then
+ AC_MSG_RESULT($lt_cv_sys_max_cmd_len)
+else
+ AC_MSG_RESULT(none)
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
+_LT_DECL([], [max_cmd_len], [0],
+ [What is the maximum length of a command?])
+])# LT_CMD_MAX_LEN
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], [])
+
+
+# _LT_HEADER_DLFCN
+# ----------------
+m4_defun([_LT_HEADER_DLFCN],
+[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl
+])# _LT_HEADER_DLFCN
+
+
+# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE,
+# ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING)
+# ----------------------------------------------------------------
+m4_defun([_LT_TRY_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "$cross_compiling" = yes; then :
+ [$4]
+else
+ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+ lt_status=$lt_dlunknown
+ cat > conftest.$ac_ext <<_LT_EOF
+[#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+# define LT_DLGLOBAL RTLD_GLOBAL
+#else
+# ifdef DL_GLOBAL
+# define LT_DLGLOBAL DL_GLOBAL
+# else
+# define LT_DLGLOBAL 0
+# endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
+   find out it does not work on some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+# ifdef RTLD_LAZY
+# define LT_DLLAZY_OR_NOW RTLD_LAZY
+# else
+# ifdef DL_LAZY
+# define LT_DLLAZY_OR_NOW DL_LAZY
+# else
+# ifdef RTLD_NOW
+# define LT_DLLAZY_OR_NOW RTLD_NOW
+# else
+# ifdef DL_NOW
+# define LT_DLLAZY_OR_NOW DL_NOW
+# else
+# define LT_DLLAZY_OR_NOW 0
+# endif
+# endif
+# endif
+# endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+ correspondingly for the symbols needed. */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+ int status = $lt_dlunknown;
+
+ if (self)
+ {
+ if (dlsym (self,"fnord")) status = $lt_dlno_uscore;
+ else
+ {
+ if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore;
+ else puts (dlerror ());
+ }
+ /* dlclose (self); */
+ }
+ else
+ puts (dlerror ());
+
+ return status;
+}]
+_LT_EOF
+ if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then
+ (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null
+ lt_status=$?
+ case x$lt_status in
+ x$lt_dlno_uscore) $1 ;;
+ x$lt_dlneed_uscore) $2 ;;
+ x$lt_dlunknown|x*) $3 ;;
+ esac
+ else :
+ # compilation failed
+ $3
+ fi
+fi
+rm -fr conftest*
+])# _LT_TRY_DLOPEN_SELF
+
+
+# LT_SYS_DLOPEN_SELF
+# ------------------
+AC_DEFUN([LT_SYS_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "x$enable_dlopen" != xyes; then
+ enable_dlopen=unknown
+ enable_dlopen_self=unknown
+ enable_dlopen_self_static=unknown
+else
+ lt_cv_dlopen=no
+ lt_cv_dlopen_libs=
+
+ case $host_os in
+ beos*)
+ lt_cv_dlopen="load_add_on"
+ lt_cv_dlopen_libs=
+ lt_cv_dlopen_self=yes
+ ;;
+
+ mingw* | pw32* | cegcc*)
+ lt_cv_dlopen="LoadLibrary"
+ lt_cv_dlopen_libs=
+ ;;
+
+ cygwin*)
+ lt_cv_dlopen="dlopen"
+ lt_cv_dlopen_libs=
+ ;;
+
+ darwin*)
+ # if libdl is installed we need to link against it
+ AC_CHECK_LIB([dl], [dlopen],
+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[
+ lt_cv_dlopen="dyld"
+ lt_cv_dlopen_libs=
+ lt_cv_dlopen_self=yes
+ ])
+ ;;
+
+ *)
+ AC_CHECK_FUNC([shl_load],
+ [lt_cv_dlopen="shl_load"],
+ [AC_CHECK_LIB([dld], [shl_load],
+ [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"],
+ [AC_CHECK_FUNC([dlopen],
+ [lt_cv_dlopen="dlopen"],
+ [AC_CHECK_LIB([dl], [dlopen],
+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],
+ [AC_CHECK_LIB([svld], [dlopen],
+ [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"],
+ [AC_CHECK_LIB([dld], [dld_link],
+ [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"])
+ ])
+ ])
+ ])
+ ])
+ ])
+ ;;
+ esac
+
+ if test "x$lt_cv_dlopen" != xno; then
+ enable_dlopen=yes
+ else
+ enable_dlopen=no
+ fi
+
+ case $lt_cv_dlopen in
+ dlopen)
+ save_CPPFLAGS="$CPPFLAGS"
+ test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+ save_LDFLAGS="$LDFLAGS"
+ wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+ save_LIBS="$LIBS"
+ LIBS="$lt_cv_dlopen_libs $LIBS"
+
+ AC_CACHE_CHECK([whether a program can dlopen itself],
+ lt_cv_dlopen_self, [dnl
+ _LT_TRY_DLOPEN_SELF(
+ lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes,
+ lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross)
+ ])
+
+ if test "x$lt_cv_dlopen_self" = xyes; then
+ wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+ AC_CACHE_CHECK([whether a statically linked program can dlopen itself],
+ lt_cv_dlopen_self_static, [dnl
+ _LT_TRY_DLOPEN_SELF(
+ lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes,
+ lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross)
+ ])
+ fi
+
+ CPPFLAGS="$save_CPPFLAGS"
+ LDFLAGS="$save_LDFLAGS"
+ LIBS="$save_LIBS"
+ ;;
+ esac
+
+ case $lt_cv_dlopen_self in
+ yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+ *) enable_dlopen_self=unknown ;;
+ esac
+
+ case $lt_cv_dlopen_self_static in
+ yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+ *) enable_dlopen_self_static=unknown ;;
+ esac
+fi
+_LT_DECL([dlopen_support], [enable_dlopen], [0],
+ [Whether dlopen is supported])
+_LT_DECL([dlopen_self], [enable_dlopen_self], [0],
+ [Whether dlopen of programs is supported])
+_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0],
+ [Whether dlopen of statically linked programs is supported])
+])# LT_SYS_DLOPEN_SELF
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], [])
+
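+# Illustrative usage sketch (not part of upstream libtool.m4): a configure.ac
+# that needs to know whether its programs can dlopen themselves would invoke
+# the public macro and then inspect the resulting shell variables, e.g.:
+#
+#   LT_SYS_DLOPEN_SELF
+#   if test "$enable_dlopen_self" = yes; then
+#     AC_DEFINE([CAN_DLOPEN_SELF], [1], [Program can dlopen itself])
+#   fi
+#
+# enable_dlopen, enable_dlopen_self and enable_dlopen_self_static each end up
+# as yes, no or unknown; CAN_DLOPEN_SELF is a hypothetical symbol used only
+# for this example.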
+
+# _LT_COMPILER_C_O([TAGNAME])
+# ---------------------------
+# Check to see if options -c and -o are simultaneously supported by the compiler.
+# This macro does not hard code the compiler like AC_PROG_CC_C_O.
+m4_defun([_LT_COMPILER_C_O],
+[m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext],
+ [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)],
+ [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no
+ $RM -r conftest 2>/dev/null
+ mkdir conftest
+ cd conftest
+ mkdir out
+ echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+ lt_compiler_flag="-o out/conftest2.$ac_objext"
+ # Insert the option either (1) after the last *FLAGS variable, or
+ # (2) before a word containing "conftest.", or (3) at the end.
+ # Note that $ac_compile itself does not contain backslashes and begins
+ # with a dollar sign (not a hyphen), so the echo should work correctly.
+ lt_compile=`echo "$ac_compile" | $SED \
+ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+ -e 's:$: $lt_compiler_flag:'`
+ (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+ (eval "$lt_compile" 2>out/conftest.err)
+ ac_status=$?
+ cat out/conftest.err >&AS_MESSAGE_LOG_FD
+ echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+ if (exit $ac_status) && test -s out/conftest2.$ac_objext
+ then
+ # The compiler can only warn and ignore the option if not recognized
+ # So say no if there are warnings
+ $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+ $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+ if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+ _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+ fi
+ fi
+ chmod u+w . 2>&AS_MESSAGE_LOG_FD
+ $RM conftest*
+ # SGI C++ compiler will create directory out/ii_files/ for
+ # template instantiation
+ test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+ $RM out/* && rmdir out
+ cd ..
+ $RM -r conftest
+ $RM conftest*
+])
+_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1],
+ [Does compiler simultaneously support -c and -o options?])
+])# _LT_COMPILER_C_O
+
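+# Illustrative note (not part of upstream libtool.m4): the cached result set
+# above is consumed by _LT_COMPILER_FILE_LOCKS below; in a generated configure
+# run it typically appears as, e.g.:
+#
+#   checking if gcc supports -c -o file.o... yes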
+
+# _LT_COMPILER_FILE_LOCKS([TAGNAME])
+# ----------------------------------
+# Check to see if we can do hard links to lock some files if needed
+m4_defun([_LT_COMPILER_FILE_LOCKS],
+[m4_require([_LT_ENABLE_LOCK])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_COMPILER_C_O([$1])
+
+hard_links="nottested"
+if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then
+ # do not overwrite the value of need_locks provided by the user
+ AC_MSG_CHECKING([if we can lock with hard links])
+ hard_links=yes
+ $RM conftest*
+ ln conftest.a conftest.b 2>/dev/null && hard_links=no
+ touch conftest.a
+ ln conftest.a conftest.b 2>&5 || hard_links=no
+ ln conftest.a conftest.b 2>/dev/null && hard_links=no
+ AC_MSG_RESULT([$hard_links])
+ if test "$hard_links" = no; then
+ AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe])
+ need_locks=warn
+ fi
+else
+ need_locks=no
+fi
+_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?])
+])# _LT_COMPILER_FILE_LOCKS
+
+
+# _LT_CHECK_OBJDIR
+# ----------------
+m4_defun([_LT_CHECK_OBJDIR],
+[AC_CACHE_CHECK([for objdir], [lt_cv_objdir],
+[rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+ lt_cv_objdir=.libs
+else
+ # MS-DOS does not allow filenames that begin with a dot.
+ lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null])
+objdir=$lt_cv_objdir
+_LT_DECL([], [objdir], [0],
+ [The name of the directory that contains temporary libtool files])dnl
+m4_pattern_allow([LT_OBJDIR])dnl
+AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/",
+ [Define to the sub-directory in which libtool stores uninstalled libraries.])
+])# _LT_CHECK_OBJDIR
+
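+# Illustrative note (not part of upstream libtool.m4): the LT_OBJDIR symbol
+# defined above lets package code refer to the directory holding uninstalled
+# libraries, e.g. in C (assuming the usual ".libs" result):
+#
+#   /* hypothetical example */
+#   const char *uninstalled_dir = LT_OBJDIR;   /* usually ".libs/" */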
+
+# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME])
+# --------------------------------------
+# Check hardcoding attributes.
+m4_defun([_LT_LINKER_HARDCODE_LIBPATH],
+[AC_MSG_CHECKING([how to hardcode library paths into programs])
+_LT_TAGVAR(hardcode_action, $1)=
+if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" ||
+ test -n "$_LT_TAGVAR(runpath_var, $1)" ||
+ test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then
+
+ # We can hardcode non-existent directories.
+ if test "$_LT_TAGVAR(hardcode_direct, $1)" != no &&
+ # If the only mechanism to avoid hardcoding is shlibpath_var, we
+ # have to relink, otherwise we might link with an installed library
+ # when we should be linking with a yet-to-be-installed one
+ ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no &&
+ test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then
+ # Linking always hardcodes the temporary library directory.
+ _LT_TAGVAR(hardcode_action, $1)=relink
+ else
+ # We can link without hardcoding, and we can hardcode nonexisting dirs.
+ _LT_TAGVAR(hardcode_action, $1)=immediate
+ fi
+else
+ # We cannot hardcode anything, or else we can only hardcode existing
+ # directories.
+ _LT_TAGVAR(hardcode_action, $1)=unsupported
+fi
+AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)])
+
+if test "$_LT_TAGVAR(hardcode_action, $1)" = relink ||
+ test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then
+ # Fast installation is not supported
+ enable_fast_install=no
+elif test "$shlibpath_overrides_runpath" = yes ||
+ test "$enable_shared" = no; then
+ # Fast installation is not necessary
+ enable_fast_install=needless
+fi
+_LT_TAGDECL([], [hardcode_action], [0],
+ [How to hardcode a shared library path into an executable])
+])# _LT_LINKER_HARDCODE_LIBPATH
+
+
+# _LT_CMD_STRIPLIB
+# ----------------
+m4_defun([_LT_CMD_STRIPLIB],
+[m4_require([_LT_DECL_EGREP])
+striplib=
+old_striplib=
+AC_MSG_CHECKING([whether stripping libraries is possible])
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+ test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+ test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+ AC_MSG_RESULT([yes])
+else
+# FIXME - insert some real tests, host_os isn't really good enough
+ case $host_os in
+ darwin*)
+ if test -n "$STRIP" ; then
+ striplib="$STRIP -x"
+ old_striplib="$STRIP -S"
+ AC_MSG_RESULT([yes])
+ else
+ AC_MSG_RESULT([no])
+ fi
+ ;;
+ *)
+ AC_MSG_RESULT([no])
+ ;;
+ esac
+fi
+_LT_DECL([], [old_striplib], [1], [Commands to strip libraries])
+_LT_DECL([], [striplib], [1])
+])# _LT_CMD_STRIPLIB
+
+
+# _LT_SYS_DYNAMIC_LINKER([TAG])
+# -----------------------------
+# PORTME Fill in your ld.so characteristics
+m4_defun([_LT_SYS_DYNAMIC_LINKER],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_OBJDUMP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+AC_MSG_CHECKING([dynamic linker characteristics])
+m4_if([$1],
+ [], [
+if test "$GCC" = yes; then
+ case $host_os in
+ darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
+ *) lt_awk_arg="/^libraries:/" ;;
+ esac
+ case $host_os in
+ mingw* | cegcc*) lt_sed_strip_eq="s,=\([[A-Za-z]]:\),\1,g" ;;
+ *) lt_sed_strip_eq="s,=/,/,g" ;;
+ esac
+ lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+ case $lt_search_path_spec in
+ *\;*)
+    # if the path contains ";" then we assume it to be the separator,
+    # otherwise default to the standard path separator (i.e. ":") - it is
+    # assumed that no part of a normal pathname contains ";" but that should
+    # be okay in the real world where ";" in dirpaths is itself problematic.
+ lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+ ;;
+ *)
+ lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+ ;;
+ esac
+  # Ok, now that we have the path, separated by spaces, we can step through
+  # it and add the multilib dir if necessary.
+ lt_tmp_lt_search_path_spec=
+ lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+ for lt_sys_path in $lt_search_path_spec; do
+ if test -d "$lt_sys_path/$lt_multi_os_dir"; then
+ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
+ else
+ test -d "$lt_sys_path" && \
+ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+ fi
+ done
+ lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS=" "; FS="/|\n";} {
+ lt_foo="";
+ lt_count=0;
+ for (lt_i = NF; lt_i > 0; lt_i--) {
+ if ($lt_i != "" && $lt_i != ".") {
+ if ($lt_i == "..") {
+ lt_count++;
+ } else {
+ if (lt_count == 0) {
+ lt_foo="/" $lt_i lt_foo;
+ } else {
+ lt_count--;
+ }
+ }
+ }
+ }
+ if (lt_foo != "") { lt_freq[[lt_foo]]++; }
+ if (lt_freq[[lt_foo]] == 1) { print lt_foo; }
+}'`
+ # AWK program above erroneously prepends '/' to C:/dos/paths
+ # for these hosts.
+ case $host_os in
+ mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+ $SED 's,/\([[A-Za-z]]:\),\1,g'` ;;
+ esac
+ sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+ sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi])
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=".so"
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# when you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments
+need_version=unknown
+
+case $host_os in
+aix3*)
+ version_type=linux
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
+ shlibpath_var=LIBPATH
+
+ # AIX 3 has no versioning support, so we append a major version to the name.
+ soname_spec='${libname}${release}${shared_ext}$major'
+ ;;
+
+aix[[4-9]]*)
+ version_type=linux
+ need_lib_prefix=no
+ need_version=no
+ hardcode_into_libs=yes
+ if test "$host_cpu" = ia64; then
+ # AIX 5 supports IA64
+ library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
+ shlibpath_var=LD_LIBRARY_PATH
+ else
+ # With GCC up to 2.95.x, collect2 would create an import file
+ # for dependence libraries. The import file would start with
+ # the line `#! .'. This would cause the generated library to
+ # depend on `.', always an invalid library. This was fixed in
+ # development snapshots of GCC prior to 3.0.
+ case $host_os in
+ aix4 | aix4.[[01]] | aix4.[[01]].*)
+ if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+ echo ' yes '
+ echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
+ :
+ else
+ can_build_shared=no
+ fi
+ ;;
+ esac
+    # AIX (on Power*) has no versioning support, so currently we cannot
+    # hardcode the correct soname into the executable.  Versioning support
+    # could probably be added to collect2, so additional links may become
+    # useful in the future.
+ if test "$aix_use_runtimelinking" = yes; then
+ # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+ # instead of lib<name>.a to let people know that these are not
+ # typical AIX shared libraries.
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ else
+ # We preserve .a as extension for shared libraries through AIX4.2
+ # and later when we are not doing run time linking.
+ library_names_spec='${libname}${release}.a $libname.a'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ fi
+ shlibpath_var=LIBPATH
+ fi
+ ;;
+
+amigaos*)
+ case $host_cpu in
+ powerpc)
+ # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ ;;
+ m68k)
+ library_names_spec='$libname.ixlibrary $libname.a'
+ # Create ${libname}_ixlibrary.a entries in /sys/libs.
+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+ ;;
+ esac
+ ;;
+
+beos*)
+ library_names_spec='${libname}${shared_ext}'
+ dynamic_linker="$host_os ld.so"
+ shlibpath_var=LIBRARY_PATH
+ ;;
+
+bsdi[[45]]*)
+ version_type=linux
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+ sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+ # the default ld.so.conf also contains /usr/contrib/lib and
+ # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+ # libtool to hard-code these into programs
+ ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+ version_type=windows
+ shrext_cmds=".dll"
+ need_version=no
+ need_lib_prefix=no
+
+ case $GCC,$cc_basename in
+ yes,*)
+ # gcc
+ library_names_spec='$libname.dll.a'
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+ dldir=$destdir/`dirname \$dlpath`~
+ test -d \$dldir || mkdir -p \$dldir~
+ $install_prog $dir/$dlname \$dldir/$dlname~
+ chmod a+x \$dldir/$dlname~
+ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+ eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+ fi'
+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+ dlpath=$dir/\$dldll~
+ $RM \$dlpath'
+ shlibpath_overrides_runpath=yes
+
+ case $host_os in
+ cygwin*)
+ # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+ soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+m4_if([$1], [],[
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"])
+ ;;
+ mingw* | cegcc*)
+ # MinGW DLLs use traditional 'lib' prefix
+ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ pw32*)
+ # pw32 DLLs use 'pw' prefix rather than 'lib'
+ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+ ;;
+ esac
+ dynamic_linker='Win32 ld.exe'
+ ;;
+
+ *,cl*)
+ # Native MSVC
+ libname_spec='$name'
+ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+ library_names_spec='${libname}.dll.lib'
+
+ case $build_os in
+ mingw*)
+ sys_lib_search_path_spec=
+ lt_save_ifs=$IFS
+ IFS=';'
+ for lt_path in $LIB
+ do
+ IFS=$lt_save_ifs
+ # Let DOS variable expansion print the short 8.3 style file name.
+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
+ done
+ IFS=$lt_save_ifs
+ # Convert to MSYS style.
+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'`
+ ;;
+ cygwin*)
+ # Convert to unix form, then to dos form, then back to unix form
+ # but this time dos style (no spaces!) so that the unix form looks
+ # like /cygdrive/c/PROGRA~1:/cygdr...
+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+ ;;
+ *)
+ sys_lib_search_path_spec="$LIB"
+ if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
+ # It is most probably a Windows format PATH.
+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
+ else
+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+ fi
+ # FIXME: find the short name or the path components, as spaces are
+ # common. (e.g. "Program Files" -> "PROGRA~1")
+ ;;
+ esac
+
+ # DLL is installed to $(libdir)/../bin by postinstall_cmds
+ postinstall_cmds='base_file=`basename \${file}`~
+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+ dldir=$destdir/`dirname \$dlpath`~
+ test -d \$dldir || mkdir -p \$dldir~
+ $install_prog $dir/$dlname \$dldir/$dlname'
+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+ dlpath=$dir/\$dldll~
+ $RM \$dlpath'
+ shlibpath_overrides_runpath=yes
+ dynamic_linker='Win32 link.exe'
+ ;;
+
+ *)
+ # Assume MSVC wrapper
+ library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib'
+ dynamic_linker='Win32 ld.exe'
+ ;;
+ esac
+ # FIXME: first we should search . and the directory the executable is in
+ shlibpath_var=PATH
+ ;;
+
+darwin* | rhapsody*)
+ dynamic_linker="$host_os dyld"
+ version_type=darwin
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+ soname_spec='${libname}${release}${major}$shared_ext'
+ shlibpath_overrides_runpath=yes
+ shlibpath_var=DYLD_LIBRARY_PATH
+ shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+m4_if([$1], [],[
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"])
+ sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+ ;;
+
+dgux*)
+ version_type=linux
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ ;;
+
+freebsd1*)
+ dynamic_linker=no
+ ;;
+
+freebsd* | dragonfly*)
+ # DragonFly does not have aout. When/if they implement a new
+ # versioning mechanism, adjust this.
+ if test -x /usr/bin/objformat; then
+ objformat=`/usr/bin/objformat`
+ else
+ case $host_os in
+ freebsd[[123]]*) objformat=aout ;;
+ *) objformat=elf ;;
+ esac
+ fi
+ version_type=freebsd-$objformat
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+ need_version=no
+ need_lib_prefix=no
+ ;;
+ freebsd-*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+ freebsd2*)
+ shlibpath_overrides_runpath=yes
+ ;;
+ freebsd3.[[01]]* | freebsdelf3.[[01]]*)
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ ;;
+ freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \
+ freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1)
+ shlibpath_overrides_runpath=no
+ hardcode_into_libs=yes
+ ;;
+ *) # from 4.6 on, and DragonFly
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ ;;
+ esac
+ ;;
+
+gnu*)
+ version_type=linux
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ hardcode_into_libs=yes
+ ;;
+
+haiku*)
+ version_type=linux
+ need_lib_prefix=no
+ need_version=no
+ dynamic_linker="$host_os runtime_loader"
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ shlibpath_var=LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+ hardcode_into_libs=yes
+ ;;
+
+hpux9* | hpux10* | hpux11*)
+ # Give a soname corresponding to the major version so that dld.sl refuses to
+ # link against other versions.
+ version_type=sunos
+ need_lib_prefix=no
+ need_version=no
+ case $host_cpu in
+ ia64*)
+ shrext_cmds='.so'
+ hardcode_into_libs=yes
+ dynamic_linker="$host_os dld.so"
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ if test "X$HPUX_IA64_MODE" = X32; then
+ sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+ else
+ sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+ fi
+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+ ;;
+ hppa*64*)
+ shrext_cmds='.sl'
+ hardcode_into_libs=yes
+ dynamic_linker="$host_os dld.sl"
+ shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
+ shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+ ;;
+ *)
+ shrext_cmds='.sl'
+ dynamic_linker="$host_os dld.sl"
+ shlibpath_var=SHLIB_PATH
+ shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ ;;
+ esac
+ # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+ postinstall_cmds='chmod 555 $lib'
+ # or fails outright, so override atomically:
+ install_override_mode=555
+ ;;
+
+interix[[3-9]]*)
+ version_type=linux
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+ hardcode_into_libs=yes
+ ;;
+
+irix5* | irix6* | nonstopux*)
+ case $host_os in
+ nonstopux*) version_type=nonstopux ;;
+ *)
+ if test "$lt_cv_prog_gnu_ld" = yes; then
+ version_type=linux
+ else
+ version_type=irix
+ fi ;;
+ esac
+ need_lib_prefix=no
+ need_version=no
+ soname_spec='${libname}${release}${shared_ext}$major'
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
+ case $host_os in
+ irix5* | nonstopux*)
+ libsuff= shlibsuff=
+ ;;
+ *)
+ case $LD in # libtool.m4 will add one of these switches to LD
+ *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+ libsuff= shlibsuff= libmagic=32-bit;;
+ *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+ libsuff=32 shlibsuff=N32 libmagic=N32;;
+ *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+ libsuff=64 shlibsuff=64 libmagic=64-bit;;
+ *) libsuff= shlibsuff= libmagic=never-match;;
+ esac
+ ;;
+ esac
+ shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+ shlibpath_overrides_runpath=no
+ sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
+ sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
+ hardcode_into_libs=yes
+ ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+ dynamic_linker=no
+ ;;
+
+# This must be Linux ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+ version_type=linux
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+
+ # Some binutils ld are patched to set DT_RUNPATH
+ AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath],
+ [lt_cv_shlibpath_overrides_runpath=no
+ save_LDFLAGS=$LDFLAGS
+ save_libdir=$libdir
+ eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \
+ LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\""
+ AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+ [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null],
+ [lt_cv_shlibpath_overrides_runpath=yes])])
+ LDFLAGS=$save_LDFLAGS
+ libdir=$save_libdir
+ ])
+ shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+ # This implies no fast_install, which is unacceptable.
+ # Some rework will be needed to allow for fast_install
+ # before this can be enabled.
+ hardcode_into_libs=yes
+
+ # Append ld.so.conf contents to the search path
+ if test -f /etc/ld.so.conf; then
+ lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+ sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+ fi
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  That test broke cross compilation, most
+  # powerpc-linux boxes now support dynamic linking, and people can
+  # always --disable-shared, so the test was removed and we simply
+  # assume the GNU/Linux dynamic linker is in use.
+ dynamic_linker='GNU/Linux ld.so'
+ ;;
+
+netbsd*)
+ version_type=sunos
+ need_lib_prefix=no
+ need_version=no
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+ finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+ dynamic_linker='NetBSD (a.out) ld.so'
+ else
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ dynamic_linker='NetBSD ld.elf_so'
+ fi
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ ;;
+
+newsos6)
+ version_type=linux
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ ;;
+
+*nto* | *qnx*)
+ version_type=qnx
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+ hardcode_into_libs=yes
+ dynamic_linker='ldqnx.so'
+ ;;
+
+openbsd*)
+ version_type=sunos
+ sys_lib_dlsearch_path_spec="/usr/lib"
+ need_lib_prefix=no
+ # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
+ case $host_os in
+ openbsd3.3 | openbsd3.3.*) need_version=yes ;;
+ *) need_version=no ;;
+ esac
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+ finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+ case $host_os in
+ openbsd2.[[89]] | openbsd2.[[89]].*)
+ shlibpath_overrides_runpath=no
+ ;;
+ *)
+ shlibpath_overrides_runpath=yes
+ ;;
+ esac
+ else
+ shlibpath_overrides_runpath=yes
+ fi
+ ;;
+
+os2*)
+ libname_spec='$name'
+ shrext_cmds=".dll"
+ need_lib_prefix=no
+ library_names_spec='$libname${shared_ext} $libname.a'
+ dynamic_linker='OS/2 ld.exe'
+ shlibpath_var=LIBPATH
+ ;;
+
+osf3* | osf4* | osf5*)
+ version_type=osf
+ need_lib_prefix=no
+ need_version=no
+ soname_spec='${libname}${release}${shared_ext}$major'
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ shlibpath_var=LD_LIBRARY_PATH
+ sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+ sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
+ ;;
+
+rdos*)
+ dynamic_linker=no
+ ;;
+
+solaris*)
+ version_type=linux
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ # ldd complains unless libraries are executable
+ postinstall_cmds='chmod +x $lib'
+ ;;
+
+sunos4*)
+ version_type=sunos
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+ finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ if test "$with_gnu_ld" = yes; then
+ need_lib_prefix=no
+ fi
+ need_version=yes
+ ;;
+
+sysv4 | sysv4.3*)
+ version_type=linux
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_vendor in
+ sni)
+ shlibpath_overrides_runpath=no
+ need_lib_prefix=no
+ runpath_var=LD_RUN_PATH
+ ;;
+ siemens)
+ need_lib_prefix=no
+ ;;
+ motorola)
+ need_lib_prefix=no
+ need_version=no
+ shlibpath_overrides_runpath=no
+ sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+ ;;
+ esac
+ ;;
+
+sysv4*MP*)
+ if test -d /usr/nec ;then
+ version_type=linux
+ library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
+ soname_spec='$libname${shared_ext}.$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ fi
+ ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+ version_type=freebsd-elf
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=yes
+ hardcode_into_libs=yes
+ if test "$with_gnu_ld" = yes; then
+ sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+ else
+ sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+ case $host_os in
+ sco3.2v5*)
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+ ;;
+ esac
+ fi
+ sys_lib_dlsearch_path_spec='/usr/lib'
+ ;;
+
+tpf*)
+ # TPF is a cross-target only. Preferred cross-host = GNU/Linux.
+ version_type=linux
+ need_lib_prefix=no
+ need_version=no
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ shlibpath_var=LD_LIBRARY_PATH
+ shlibpath_overrides_runpath=no
+ hardcode_into_libs=yes
+ ;;
+
+uts4*)
+ version_type=linux
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ shlibpath_var=LD_LIBRARY_PATH
+ ;;
+
+*)
+ dynamic_linker=no
+ ;;
+esac
+AC_MSG_RESULT([$dynamic_linker])
+test "$dynamic_linker" = no && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test "$GCC" = yes; then
+ variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
+ sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
+fi
+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
+ sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
+fi
+
+_LT_DECL([], [variables_saved_for_relink], [1],
+ [Variables whose values should be saved in libtool wrapper scripts and
+ restored at link time])
+_LT_DECL([], [need_lib_prefix], [0],
+ [Do we need the "lib" prefix for modules?])
+_LT_DECL([], [need_version], [0], [Do we need a version for libraries?])
+_LT_DECL([], [version_type], [0], [Library versioning type])
+_LT_DECL([], [runpath_var], [0], [Shared library runtime path variable])
+_LT_DECL([], [shlibpath_var], [0],[Shared library path variable])
+_LT_DECL([], [shlibpath_overrides_runpath], [0],
+ [Is shlibpath searched before the hard-coded library search path?])
+_LT_DECL([], [libname_spec], [1], [Format of library name prefix])
+_LT_DECL([], [library_names_spec], [1],
+ [[List of archive names. First name is the real one, the rest are links.
+ The last name is the one that the linker finds with -lNAME]])
+_LT_DECL([], [soname_spec], [1],
+ [[The coded name of the library, if different from the real name]])
+_LT_DECL([], [install_override_mode], [1],
+ [Permission mode override for installation of shared libraries])
+_LT_DECL([], [postinstall_cmds], [2],
+ [Command to use after installation of a shared archive])
+_LT_DECL([], [postuninstall_cmds], [2],
+ [Command to use after uninstallation of a shared archive])
+_LT_DECL([], [finish_cmds], [2],
+ [Commands used to finish a libtool library installation in a directory])
+_LT_DECL([], [finish_eval], [1],
+ [[As "finish_cmds", except a single script fragment to be evaled but
+ not shown]])
+_LT_DECL([], [hardcode_into_libs], [0],
+ [Whether we should hardcode library paths into libraries])
+_LT_DECL([], [sys_lib_search_path_spec], [2],
+ [Compile-time system search path for libraries])
+_LT_DECL([], [sys_lib_dlsearch_path_spec], [2],
+ [Run-time system search path for libraries])
+])# _LT_SYS_DYNAMIC_LINKER
+
+
+# _LT_PATH_TOOL_PREFIX(TOOL)
+# --------------------------
+# find a file program which can recognize a shared library
+AC_DEFUN([_LT_PATH_TOOL_PREFIX],
+[m4_require([_LT_DECL_EGREP])dnl
+AC_MSG_CHECKING([for $1])
+AC_CACHE_VAL(lt_cv_path_MAGIC_CMD,
+[case $MAGIC_CMD in
+[[\\/*] | ?:[\\/]*])
+ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+ ;;
+*)
+ lt_save_MAGIC_CMD="$MAGIC_CMD"
+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+dnl $ac_dummy forces splitting on constant user-supplied paths.
+dnl POSIX.2 word splitting is done only on the output of word expansions,
+dnl not every word. This closes a longstanding sh security hole.
+ ac_dummy="m4_if([$2], , $PATH, [$2])"
+ for ac_dir in $ac_dummy; do
+ IFS="$lt_save_ifs"
+ test -z "$ac_dir" && ac_dir=.
+ if test -f $ac_dir/$1; then
+ lt_cv_path_MAGIC_CMD="$ac_dir/$1"
+ if test -n "$file_magic_test_file"; then
+ case $deplibs_check_method in
+ "file_magic "*)
+ file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+ MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+ if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+ $EGREP "$file_magic_regex" > /dev/null; then
+ :
+ else
+ cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such. This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem. Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool@gnu.org
+
+_LT_EOF
+ fi ;;
+ esac
+ fi
+ break
+ fi
+ done
+ IFS="$lt_save_ifs"
+ MAGIC_CMD="$lt_save_MAGIC_CMD"
+ ;;
+esac])
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+ AC_MSG_RESULT($MAGIC_CMD)
+else
+ AC_MSG_RESULT(no)
+fi
+_LT_DECL([], [MAGIC_CMD], [0],
+ [Used to examine libraries when file_magic_cmd begins with "file"])dnl
+])# _LT_PATH_TOOL_PREFIX
+
+# Old name:
+AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], [])
+
+
+# _LT_PATH_MAGIC
+# --------------
+# find a file program which can recognize a shared library
+m4_defun([_LT_PATH_MAGIC],
+[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH)
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+ if test -n "$ac_tool_prefix"; then
+ _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH)
+ else
+ MAGIC_CMD=:
+ fi
+fi
+])# _LT_PATH_MAGIC
+
+
+# LT_PATH_LD
+# ----------
+# find the pathname to the GNU or non-GNU linker
+AC_DEFUN([LT_PATH_LD],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PROG_ECHO_BACKSLASH])dnl
+
+AC_ARG_WITH([gnu-ld],
+ [AS_HELP_STRING([--with-gnu-ld],
+ [assume the C compiler uses GNU ld @<:@default=no@:>@])],
+ [test "$withval" = no || with_gnu_ld=yes],
+ [with_gnu_ld=no])dnl
+
+ac_prog=ld
+if test "$GCC" = yes; then
+ # Check if gcc -print-prog-name=ld gives a path.
+ AC_MSG_CHECKING([for ld used by $CC])
+ case $host in
+ *-*-mingw*)
+ # gcc leaves a trailing carriage return which upsets mingw
+ ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+ *)
+ ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+ esac
+ case $ac_prog in
+ # Accept absolute paths.
+ [[\\/]]* | ?:[[\\/]]*)
+ re_direlt='/[[^/]][[^/]]*/\.\./'
+ # Canonicalize the pathname of ld
+ ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+ while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+ ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+ done
+ test -z "$LD" && LD="$ac_prog"
+ ;;
+ "")
+ # If it fails, then pretend we aren't using GCC.
+ ac_prog=ld
+ ;;
+ *)
+ # If it is relative, then search for the first ld in PATH.
+ with_gnu_ld=unknown
+ ;;
+ esac
+elif test "$with_gnu_ld" = yes; then
+ AC_MSG_CHECKING([for GNU ld])
+else
+ AC_MSG_CHECKING([for non-GNU ld])
+fi
+AC_CACHE_VAL(lt_cv_path_LD,
+[if test -z "$LD"; then
+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+ for ac_dir in $PATH; do
+ IFS="$lt_save_ifs"
+ test -z "$ac_dir" && ac_dir=.
+ if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+ lt_cv_path_LD="$ac_dir/$ac_prog"
+ # Check to see if the program is GNU ld. I'd rather use --version,
+ # but apparently some variants of GNU ld only accept -v.
+ # Break only if it was the GNU/non-GNU ld that we prefer.
+ case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+ *GNU* | *'with BFD'*)
+ test "$with_gnu_ld" != no && break
+ ;;
+ *)
+ test "$with_gnu_ld" != yes && break
+ ;;
+ esac
+ fi
+ done
+ IFS="$lt_save_ifs"
+else
+ lt_cv_path_LD="$LD" # Let the user override the test with a path.
+fi])
+LD="$lt_cv_path_LD"
+if test -n "$LD"; then
+ AC_MSG_RESULT($LD)
+else
+ AC_MSG_RESULT(no)
+fi
+test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH])
+_LT_PATH_LD_GNU
+AC_SUBST([LD])
+
+_LT_TAGDECL([], [LD], [1], [The linker used to build libraries])
+])# LT_PATH_LD
+
+# Old names:
+AU_ALIAS([AM_PROG_LD], [LT_PATH_LD])
+AU_ALIAS([AC_PROG_LD], [LT_PATH_LD])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_LD], [])
+dnl AC_DEFUN([AC_PROG_LD], [])
+
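+# Illustrative usage sketch (not part of upstream libtool.m4): LT_PATH_LD is
+# normally pulled in via LT_INIT, but a configure.ac may also require it
+# directly and report the discovered linker, e.g.:
+#
+#   LT_PATH_LD
+#   AC_MSG_NOTICE([using linker: $LD (GNU ld: $with_gnu_ld)])
+#
+# The probe can be steered from the command line with --with-gnu-ld.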
+
+# _LT_PATH_LD_GNU
+# ---------------
+m4_defun([_LT_PATH_LD_GNU],
+[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], lt_cv_prog_gnu_ld,
+[# I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+ lt_cv_prog_gnu_ld=yes
+ ;;
+*)
+ lt_cv_prog_gnu_ld=no
+ ;;
+esac])
+with_gnu_ld=$lt_cv_prog_gnu_ld
+])# _LT_PATH_LD_GNU
+
+
+# _LT_CMD_RELOAD
+# --------------
+# find reload flag for linker
+# -- PORTME Some linkers may need a different reload flag.
+m4_defun([_LT_CMD_RELOAD],
+[AC_CACHE_CHECK([for $LD option to reload object files],
+ lt_cv_ld_reload_flag,
+ [lt_cv_ld_reload_flag='-r'])
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+ cygwin* | mingw* | pw32* | cegcc*)
+ if test "$GCC" != yes; then
+ reload_cmds=false
+ fi
+ ;;
+ darwin*)
+ if test "$GCC" = yes; then
+ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+ else
+ reload_cmds='$LD$reload_flag -o $output$reload_objs'
+ fi
+ ;;
+esac
+_LT_TAGDECL([], [reload_flag], [1], [How to create reloadable object files])dnl
+_LT_TAGDECL([], [reload_cmds], [2])dnl
+])# _LT_CMD_RELOAD
+
+
+# _LT_CHECK_MAGIC_METHOD
+# ----------------------
+# how to check for library dependencies
+# -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_MAGIC_METHOD],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+AC_CACHE_CHECK([how to recognize dependent libraries],
+lt_cv_deplibs_check_method,
+[lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# `none' -- dependencies not supported.
+# `unknown' -- same as none, but documents that we really don't know.
+# `pass_all' -- all dependencies passed with no checks.
+# `test_compile' -- check by making a test program.
+# `file_magic [[regex]]' -- check by looking for files in the library path
+#   that respond to the $file_magic_cmd with a given extended regex.
+# If you have `file' or equivalent on your system and you're not sure
+# whether `pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[[4-9]]*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+beos*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+bsdi[[45]]*)
+ lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
+ lt_cv_file_magic_cmd='/usr/bin/file -L'
+ lt_cv_file_magic_test_file=/shlib/libc.so
+ ;;
+
+cygwin*)
+ # func_win32_libid is a shell function defined in ltmain.sh
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ ;;
+
+mingw* | pw32*)
+ # Base MSYS/MinGW do not provide the 'file' command needed by
+ # func_win32_libid shell function, so use a weaker test based on 'objdump',
+ # unless we find 'file', for example because we are cross-compiling.
+ # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
+ if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
+ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+ lt_cv_file_magic_cmd='func_win32_libid'
+ else
+ # Keep this pattern in sync with the one in func_win32_libid.
+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ fi
+ ;;
+
+cegcc*)
+ # use the weaker test based on 'objdump'. See mingw*.
+ lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+ lt_cv_file_magic_cmd='$OBJDUMP -f'
+ ;;
+
+darwin* | rhapsody*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+freebsd* | dragonfly*)
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+ case $host_cpu in
+ i*86 )
+ # Not sure whether the presence of OpenBSD here was a mistake.
+ # Let's accept both of them until this is cleared up.
+ lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library'
+ lt_cv_file_magic_cmd=/usr/bin/file
+ lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+ ;;
+ esac
+ else
+ lt_cv_deplibs_check_method=pass_all
+ fi
+ ;;
+
+gnu*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+haiku*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+hpux10.20* | hpux11*)
+ lt_cv_file_magic_cmd=/usr/bin/file
+ case $host_cpu in
+ ia64*)
+ lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64'
+ lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+ ;;
+ hppa*64*)
+ [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]']
+ lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+ ;;
+ *)
+ lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library'
+ lt_cv_file_magic_test_file=/usr/lib/libc.sl
+ ;;
+ esac
+ ;;
+
+interix[[3-9]]*)
+ # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here
+ lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$'
+ ;;
+
+irix5* | irix6* | nonstopux*)
+ case $LD in
+ *-32|*"-32 ") libmagic=32-bit;;
+ *-n32|*"-n32 ") libmagic=N32;;
+ *-64|*"-64 ") libmagic=64-bit;;
+ *) libmagic=never-match;;
+ esac
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+# This must be Linux ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+netbsd*)
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+ lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+ else
+ lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$'
+ fi
+ ;;
+
+newos6*)
+ lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)'
+ lt_cv_file_magic_cmd=/usr/bin/file
+ lt_cv_file_magic_test_file=/usr/lib/libnls.so
+ ;;
+
+*nto* | *qnx*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+openbsd*)
+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+ lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$'
+ else
+ lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+ fi
+ ;;
+
+osf3* | osf4* | osf5*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+rdos*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+solaris*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+
+sysv4 | sysv4.3*)
+ case $host_vendor in
+ motorola)
+ lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]'
+ lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+ ;;
+ ncr)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+ sequent)
+ lt_cv_file_magic_cmd='/bin/file'
+ lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )'
+ ;;
+ sni)
+ lt_cv_file_magic_cmd='/bin/file'
+ lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib"
+ lt_cv_file_magic_test_file=/lib/libc.so
+ ;;
+ siemens)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+ pc)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+ esac
+ ;;
+
+tpf*)
+ lt_cv_deplibs_check_method=pass_all
+ ;;
+esac
+])
+
+file_magic_glob=
+want_nocaseglob=no
+if test "$build" = "$host"; then
+ case $host_os in
+ mingw* | pw32*)
+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
+ want_nocaseglob=yes
+ else
+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"`
+ fi
+ ;;
+ esac
+fi
+
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+_LT_DECL([], [deplibs_check_method], [1],
+ [Method to check whether dependent libraries are shared objects])
+_LT_DECL([], [file_magic_cmd], [1],
+ [Command to use when deplibs_check_method = "file_magic"])
+_LT_DECL([], [file_magic_glob], [1],
+ [How to find potential files when deplibs_check_method = "file_magic"])
+_LT_DECL([], [want_nocaseglob], [1],
+ [Find potential files using nocaseglob when deplibs_check_method = "file_magic"])
+])# _LT_CHECK_MAGIC_METHOD
+
+
+# LT_PATH_NM
+# ----------
+# find the pathname to a BSD- or MS-compatible name lister
+AC_DEFUN([LT_PATH_NM],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM,
+[if test -n "$NM"; then
+ # Let the user override the test.
+ lt_cv_path_NM="$NM"
+else
+ lt_nm_to_check="${ac_tool_prefix}nm"
+ if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+ lt_nm_to_check="$lt_nm_to_check nm"
+ fi
+ for lt_tmp_nm in $lt_nm_to_check; do
+ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+ for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+ IFS="$lt_save_ifs"
+ test -z "$ac_dir" && ac_dir=.
+ tmp_nm="$ac_dir/$lt_tmp_nm"
+ if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
+ # Check to see if the nm accepts a BSD-compat flag.
+ # Adding the `sed 1q' prevents false positives on HP-UX, which says:
+ # nm: unknown option "B" ignored
+ # Tru64's nm complains that /dev/null is an invalid object file
+ case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+ */dev/null* | *'Invalid file or object type'*)
+ lt_cv_path_NM="$tmp_nm -B"
+ break
+ ;;
+ *)
+ case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+ */dev/null*)
+ lt_cv_path_NM="$tmp_nm -p"
+ break
+ ;;
+ *)
+ lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+ continue # so that we can try to find one that supports BSD flags
+ ;;
+ esac
+ ;;
+ esac
+ fi
+ done
+ IFS="$lt_save_ifs"
+ done
+ : ${lt_cv_path_NM=no}
+fi])
+if test "$lt_cv_path_NM" != "no"; then
+ NM="$lt_cv_path_NM"
+else
+ # Didn't find any BSD compatible name lister, look for dumpbin.
+ if test -n "$DUMPBIN"; then :
+ # Let the user override the test.
+ else
+ AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :)
+ case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
+ *COFF*)
+ DUMPBIN="$DUMPBIN -symbols"
+ ;;
+ *)
+ DUMPBIN=:
+ ;;
+ esac
+ fi
+ AC_SUBST([DUMPBIN])
+ if test "$DUMPBIN" != ":"; then
+ NM="$DUMPBIN"
+ fi
+fi
+test -z "$NM" && NM=nm
+AC_SUBST([NM])
+_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl
+
+AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface],
+ [lt_cv_nm_interface="BSD nm"
+ echo "int some_variable = 0;" > conftest.$ac_ext
+ (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD)
+ (eval "$ac_compile" 2>conftest.err)
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD)
+ (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD)
+ cat conftest.out >&AS_MESSAGE_LOG_FD
+ if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+ lt_cv_nm_interface="MS dumpbin"
+ fi
+ rm -f conftest*])
+])# LT_PATH_NM
+
+# Old names:
+AU_ALIAS([AM_PROG_NM], [LT_PATH_NM])
+AU_ALIAS([AC_PROG_NM], [LT_PATH_NM])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_NM], [])
+dnl AC_DEFUN([AC_PROG_NM], [])
+
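+# Illustrative usage sketch (not part of upstream libtool.m4): after LT_PATH_NM
+# (usually invoked indirectly through LT_INIT), $NM holds a BSD- or
+# MS-compatible name lister and lt_cv_nm_interface records which flavour was
+# detected, e.g.:
+#
+#   LT_PATH_NM
+#   AC_MSG_NOTICE([name lister: $NM ($lt_cv_nm_interface)])
+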
+# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+# --------------------------------
+# how to determine the name of the shared library
+# associated with a specific link library.
+# -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+m4_require([_LT_DECL_DLLTOOL])
+AC_CACHE_CHECK([how to associate runtime and link libraries],
+lt_cv_sharedlib_from_linklib_cmd,
+[lt_cv_sharedlib_from_linklib_cmd='unknown'
+
+case $host_os in
+cygwin* | mingw* | pw32* | cegcc*)
+ # two different shell functions defined in ltmain.sh
+ # decide which to use based on capabilities of $DLLTOOL
+ case `$DLLTOOL --help 2>&1` in
+ *--identify-strict*)
+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
+ ;;
+ *)
+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
+ ;;
+ esac
+ ;;
+*)
+ # fallback: assume linklib IS sharedlib
+ lt_cv_sharedlib_from_linklib_cmd="$ECHO"
+ ;;
+esac
+])
+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
+
+_LT_DECL([], [sharedlib_from_linklib_cmd], [1],
+ [Command to associate shared and link libraries])
+])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+
+
+# _LT_PATH_MANIFEST_TOOL
+# ----------------------
+# locate the manifest tool
+m4_defun([_LT_PATH_MANIFEST_TOOL],
+[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :)
+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
+AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool],
+ [lt_cv_path_mainfest_tool=no
+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD
+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
+ cat conftest.err >&AS_MESSAGE_LOG_FD
+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then
+ lt_cv_path_mainfest_tool=yes
+ fi
+ rm -f conftest*])
+if test "x$lt_cv_path_mainfest_tool" != xyes; then
+ MANIFEST_TOOL=:
+fi
+_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl
+])# _LT_PATH_MANIFEST_TOOL
+
+
+# LT_LIB_M
+# --------
+# check for math library
+AC_DEFUN([LT_LIB_M],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+LIBM=
+case $host in
+*-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*)
+  # These systems don't have libm, or don't need it
+ ;;
+*-ncr-sysv4.3*)
+ AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw")
+ AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm")
+ ;;
+*)
+ AC_CHECK_LIB(m, cos, LIBM="-lm")
+ ;;
+esac
+AC_SUBST([LIBM])
+])# LT_LIB_M
+
+# Old name:
+AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_CHECK_LIBM], [])
+
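+# Illustrative usage sketch (not part of upstream libtool.m4): a package that
+# links against math routines would invoke the macro in configure.ac and use
+# the substituted variable in its Makefile.am, e.g.:
+#
+#   configure.ac:  LT_LIB_M
+#   Makefile.am:   foo_LDADD = $(LIBM)
+#
+# On systems that do not need a separate libm, LIBM simply expands to nothing.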
+
+# _LT_COMPILER_NO_RTTI([TAGNAME])
+# -------------------------------
+m4_defun([_LT_COMPILER_NO_RTTI],
+[m4_require([_LT_TAG_COMPILER])dnl
+
+_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+
+if test "$GCC" = yes; then
+ case $cc_basename in
+ nvcc*)
+ _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;;
+ esac
+
+ _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
+ lt_cv_prog_compiler_rtti_exceptions,
+ [-fno-rtti -fno-exceptions], [],
+ [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"])
+fi
+_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1],
+ [Compiler flag to turn off builtin functions])
+])# _LT_COMPILER_NO_RTTI
+
+
+# _LT_CMD_GLOBAL_SYMBOLS
+# ----------------------
+m4_defun([_LT_CMD_GLOBAL_SYMBOLS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+
+# Check for command to grab the raw symbol name followed by C symbol from nm.
+AC_MSG_CHECKING([command to parse $NM output from $compiler object])
+AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe],
+[
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix. What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[[BCDEGRST]]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+ symcode='[[BCDT]]'
+ ;;
+cygwin* | mingw* | pw32* | cegcc*)
+ symcode='[[ABCDGISTW]]'
+ ;;
+hpux*)
+ if test "$host_cpu" = ia64; then
+ symcode='[[ABCDEGRST]]'
+ fi
+ ;;
+irix* | nonstopux*)
+ symcode='[[BCDEGRST]]'
+ ;;
+osf*)
+ symcode='[[BCDEGQRST]]'
+ ;;
+solaris*)
+ symcode='[[BDRT]]'
+ ;;
+sco3.2v5*)
+ symcode='[[DT]]'
+ ;;
+sysv4.2uw2*)
+ symcode='[[DT]]'
+ ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+ symcode='[[ABDT]]'
+ ;;
+sysv4)
+ symcode='[[DFNSTU]]'
+ ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+ symcode='[[ABCDGIRSTW]]' ;;
+esac
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'"
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'"
+
+# Handle CRLF in mingw tool chain
+opt_cr=
+case $build_os in
+mingw*)
+ opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+ ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+ # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+ symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+ # Write the raw and C identifiers.
+ if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+ # Fake it for dumpbin and say T for any non-static function
+ # and D for any global variable.
+ # Also find C++ and __fastcall symbols from MSVC++,
+ # which start with @ or ?.
+ lt_cv_sys_global_symbol_pipe="$AWK ['"\
+" {last_section=section; section=\$ 3};"\
+" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+" \$ 0!~/External *\|/{next};"\
+" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+" {if(hide[section]) next};"\
+" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
+" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
+" s[1]~/^[@?]/{print s[1], s[1]; next};"\
+" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
+" ' prfx=^$ac_symprfx]"
+ else
+ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+ fi
+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+ # Check to see that the pipe works correctly.
+ pipe_works=no
+
+ rm -f conftest*
+ cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
+
+ if AC_TRY_EVAL(ac_compile); then
+ # Now try to grab the symbols.
+ nlist=conftest.nm
+ if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then
+ # Try sorting and uniquifying the output.
+ if sort "$nlist" | uniq > "$nlist"T; then
+ mv -f "$nlist"T "$nlist"
+ else
+ rm -f "$nlist"T
+ fi
+
+ # Make sure that we snagged all the symbols we need.
+ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+ cat <<_LT_EOF > conftest.$ac_ext
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+ relocations are performed -- see ld's documentation on pseudo-relocs. */
+# define LT@&t@_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data. */
+# define LT@&t@_DLSYM_CONST
+#else
+# define LT@&t@_DLSYM_CONST const
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+ # Now generate the symbol file.
+ eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+ cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols. */
+LT@&t@_DLSYM_CONST struct {
+ const char *name;
+ void *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[[]] =
+{
+ { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+ $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+ cat <<\_LT_EOF >> conftest.$ac_ext
+ {0, (void *) 0}
+};
+
+/* This works around a problem in FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+ return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+ # Now try linking the two files.
+ mv conftest.$ac_objext conftstm.$ac_objext
+ lt_globsym_save_LIBS=$LIBS
+ lt_globsym_save_CFLAGS=$CFLAGS
+ LIBS="conftstm.$ac_objext"
+ CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
+ if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then
+ pipe_works=yes
+ fi
+ LIBS=$lt_globsym_save_LIBS
+ CFLAGS=$lt_globsym_save_CFLAGS
+ else
+ echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
+ fi
+ else
+ echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD
+ fi
+ else
+ echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
+ fi
+ else
+ echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD
+ cat conftest.$ac_ext >&5
+ fi
+ rm -rf conftest* conftst*
+
+ # Do not use the global_symbol_pipe unless it works.
+ if test "$pipe_works" = yes; then
+ break
+ else
+ lt_cv_sys_global_symbol_pipe=
+ fi
+done
+])
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+ lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+ AC_MSG_RESULT(failed)
+else
+ AC_MSG_RESULT(ok)
+fi
+
+# Response file support.
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+ nm_file_list_spec='@'
+elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then
+ nm_file_list_spec='@'
+fi
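+# When nm_file_list_spec is set to '@', the object files to inspect can be
+# listed in a response file and handed to $NM as "@file" instead of being
+# spelled out on the command line (useful when the list is very long).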
+
+_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
+ [Take the output of nm and produce a listing of raw symbols and C names])
+_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
+ [Transform the output of nm in a proper C declaration])
+_LT_DECL([global_symbol_to_c_name_address],
+ [lt_cv_sys_global_symbol_to_c_name_address], [1],
+ [Transform the output of nm in a C name address pair])
+_LT_DECL([global_symbol_to_c_name_address_lib_prefix],
+ [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
+ [Transform the output of nm in a C name address pair when lib prefix is needed])
+_LT_DECL([], [nm_file_list_spec], [1],
+ [Specify filename containing input files for $NM])
+]) # _LT_CMD_GLOBAL_SYMBOLS
+
+
+# _LT_COMPILER_PIC([TAGNAME])
+# ---------------------------
+m4_defun([_LT_COMPILER_PIC],
+[m4_require([_LT_TAG_COMPILER])dnl
+_LT_TAGVAR(lt_prog_compiler_wl, $1)=
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+_LT_TAGVAR(lt_prog_compiler_static, $1)=
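+# Meaning of the three per-tag variables probed below:
+#   lt_prog_compiler_wl     - prefix that passes a flag through to the linker;
+#                             e.g. with wl='-Wl,' the spec '${wl}-rpath ${wl}$libdir'
+#                             expands to '-Wl,-rpath -Wl,/some/path'
+#   lt_prog_compiler_pic    - flag that makes the compiler emit PIC objects
+#   lt_prog_compiler_static - flag that forces a fully static link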
+
+m4_if([$1], [CXX], [
+ # C++ specific cases for pic, static, wl, etc.
+ if test "$GXX" = yes; then
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+ case $host_os in
+ aix*)
+ # All AIX code is PIC.
+ if test "$host_cpu" = ia64; then
+ # AIX 5 now supports IA64 processor
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ fi
+ ;;
+
+ amigaos*)
+ case $host_cpu in
+ powerpc)
+ # see comment about AmigaOS4 .so support
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ m68k)
+ # FIXME: we need at least 68020 code to build shared libraries, but
+ # adding the `-m68020' flag to GCC prevents building anything better,
+ # like `-m68040'.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+ ;;
+ esac
+ ;;
+
+ beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+ # PIC is the default for these OSes.
+ ;;
+ mingw* | cygwin* | os2* | pw32* | cegcc*)
+ # This hack is so that the source file can tell whether it is being
+ # built for inclusion in a dll (and should export symbols for example).
+ # Although the cygwin gcc ignores -fPIC, still need this for old-style
+ # (--disable-auto-import) libraries
+ m4_if([$1], [GCJ], [],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+ ;;
+ darwin* | rhapsody*)
+ # PIC is the default on this platform
+ # Common symbols not allowed in MH_DYLIB files
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+ ;;
+ *djgpp*)
+ # DJGPP does not support shared libraries at all
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ ;;
+ haiku*)
+ # PIC is the default for Haiku.
+ # The "-static" flag exists, but is broken.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)=
+ ;;
+ interix[[3-9]]*)
+ # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+ # Instead, we relocate shared libraries at runtime.
+ ;;
+ sysv4*MP*)
+ if test -d /usr/nec; then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+ fi
+ ;;
+ hpux*)
+ # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+ # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag
+ # sets the default TLS model and affects inlining.
+ case $host_cpu in
+ hppa*64*)
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ esac
+ ;;
+ *qnx* | *nto*)
+      # QNX uses GNU C++, but we need to pass the -shared option too, otherwise
+ # it will coredump.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ esac
+ else
+ case $host_os in
+ aix[[4-9]]*)
+ # All AIX code is PIC.
+ if test "$host_cpu" = ia64; then
+ # AIX 5 now supports IA64 processor
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ else
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+ fi
+ ;;
+ chorus*)
+ case $cc_basename in
+ cxch68*)
+ # Green Hills C++ Compiler
+ # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
+ ;;
+ esac
+ ;;
+ mingw* | cygwin* | os2* | pw32* | cegcc*)
+ # This hack is so that the source file can tell whether it is being
+ # built for inclusion in a dll (and should export symbols for example).
+ m4_if([$1], [GCJ], [],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+ ;;
+ dgux*)
+ case $cc_basename in
+ ec++*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ ;;
+ ghcx*)
+ # Green Hills C++ Compiler
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ freebsd* | dragonfly*)
+ # FreeBSD uses GNU C++
+ ;;
+ hpux9* | hpux10* | hpux11*)
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+ if test "$host_cpu" != ia64; then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+ fi
+ ;;
+ aCC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+ case $host_cpu in
+ hppa*64*|ia64*)
+ # +Z the default
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+ ;;
+ esac
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ interix*)
+ # This is c89, which is MS Visual C++ (no shared libs)
+ # Anyone wants to do a port?
+ ;;
+ irix5* | irix6* | nonstopux*)
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ # CC pic flag -KPIC is the default.
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ linux* | k*bsd*-gnu | kopensolaris*-gnu)
+ case $cc_basename in
+ KCC*)
+ # KAI C++ Compiler
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ ecpc* )
+ # old Intel C++ for x86_64 which still supported -KPIC.
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ icpc* )
+ # Intel C++, used to be incompatible with GCC.
+ # ICC 10 doesn't accept -KPIC any more.
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ pgCC* | pgcpp*)
+ # Portland Group C++ compiler
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ cxx*)
+ # Compaq C++
+ # Make sure the PIC flag is empty. It appears that all Alpha
+ # Linux and Compaq Tru64 Unix objects are PIC.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+ xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*)
+ # IBM XL 8.0, 9.0 on PPC and BlueGene
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+ ;;
+ *)
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ C*)
+ # Sun C++ 5.9
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+ ;;
+ esac
+ ;;
+ esac
+ ;;
+ lynxos*)
+ ;;
+ m88k*)
+ ;;
+ mvs*)
+ case $cc_basename in
+ cxx*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ netbsd*)
+ ;;
+ *qnx* | *nto*)
+        # QNX uses GNU C++, but we need to pass the -shared option too, otherwise
+ # it will coredump.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+ ;;
+ osf3* | osf4* | osf5*)
+ case $cc_basename in
+ KCC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+ ;;
+ RCC*)
+ # Rational C++ 2.4.1
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ ;;
+ cxx*)
+ # Digital/Compaq C++
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # Make sure the PIC flag is empty. It appears that all Alpha
+ # Linux and Compaq Tru64 Unix objects are PIC.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ psos*)
+ ;;
+ solaris*)
+ case $cc_basename in
+ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+ ;;
+ gcx*)
+ # Green Hills C++ Compiler
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ sunos4*)
+ case $cc_basename in
+ CC*)
+ # Sun C++ 4.x
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ lcc*)
+ # Lucid
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ esac
+ ;;
+ tandem*)
+ case $cc_basename in
+ NCC*)
+ # NonStop-UX NCC 3.20
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ ;;
+ *)
+ ;;
+ esac
+ ;;
+ vxworks*)
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+ ;;
+ esac
+ fi
+],
+[
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+ case $host_os in
+ aix*)
+ # All AIX code is PIC.
+ if test "$host_cpu" = ia64; then
+ # AIX 5 now supports IA64 processor
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ fi
+ ;;
+
+ amigaos*)
+ case $host_cpu in
+ powerpc)
+ # see comment about AmigaOS4 .so support
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ m68k)
+ # FIXME: we need at least 68020 code to build shared libraries, but
+ # adding the `-m68020' flag to GCC prevents building anything better,
+ # like `-m68040'.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+ ;;
+ esac
+ ;;
+
+ beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+ # PIC is the default for these OSes.
+ ;;
+
+ mingw* | cygwin* | pw32* | os2* | cegcc*)
+ # This hack is so that the source file can tell whether it is being
+ # built for inclusion in a dll (and should export symbols for example).
+ # Although the cygwin gcc ignores -fPIC, still need this for old-style
+ # (--disable-auto-import) libraries
+ m4_if([$1], [GCJ], [],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+ ;;
+
+ darwin* | rhapsody*)
+ # PIC is the default on this platform
+ # Common symbols not allowed in MH_DYLIB files
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+ ;;
+
+ haiku*)
+ # PIC is the default for Haiku.
+ # The "-static" flag exists, but is broken.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)=
+ ;;
+
+ hpux*)
+ # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+ # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag
+ # sets the default TLS model and affects inlining.
+ case $host_cpu in
+ hppa*64*)
+ # +Z the default
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ esac
+ ;;
+
+ interix[[3-9]]*)
+ # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+ # Instead, we relocate shared libraries at runtime.
+ ;;
+
+ msdosdjgpp*)
+ # Just because we use GCC doesn't mean we suddenly get shared libraries
+ # on systems that don't support them.
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+ enable_shared=no
+ ;;
+
+ *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to pass the -shared option too, otherwise
+ # it will coredump.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+ ;;
+
+ sysv4*MP*)
+ if test -d /usr/nec; then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+ fi
+ ;;
+
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ ;;
+ esac
+
+ case $cc_basename in
+ nvcc*) # Cuda Compiler Driver 2.2
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker '
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Xcompiler -fPIC'
+ ;;
+ esac
+ else
+ # PORTME Check for flag to pass linker flags through the system compiler.
+ case $host_os in
+ aix*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ if test "$host_cpu" = ia64; then
+ # AIX 5 now supports IA64 processor
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ else
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+ fi
+ ;;
+
+ mingw* | cygwin* | pw32* | os2* | cegcc*)
+ # This hack is so that the source file can tell whether it is being
+ # built for inclusion in a dll (and should export symbols for example).
+ m4_if([$1], [GCJ], [],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+ ;;
+
+ hpux9* | hpux10* | hpux11*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+ # not for PA HP-UX.
+ case $host_cpu in
+ hppa*64*|ia64*)
+ # +Z the default
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+ ;;
+ esac
+ # Is there a better lt_prog_compiler_static that works with the bundled CC?
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+ ;;
+
+ irix5* | irix6* | nonstopux*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # PIC (with -KPIC) is the default.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+
+ linux* | k*bsd*-gnu | kopensolaris*-gnu)
+ case $cc_basename in
+ # old Intel for x86_64 which still supported -KPIC.
+ ecc*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ # icc used to be incompatible with GCC.
+ # ICC 10 doesn't accept -KPIC any more.
+ icc* | ifort*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+ ;;
+ # Lahey Fortran 8.1.
+ lf95*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
+ ;;
+ nagfor*)
+ # NAG Fortran compiler
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group compilers (*not* the Pentium gcc compiler,
+ # which looks to be a dead project)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+ ccc*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # All Alpha code is PIC.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+ xl* | bgxl* | bgf* | mpixl*)
+ # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+ ;;
+ *)
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ F* | *Sun*Fortran*)
+ # Sun Fortran 8.3 passes all unrecognized flags to the linker
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)=''
+ ;;
+ *Sun\ C*)
+ # Sun C 5.9
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ ;;
+ esac
+ ;;
+ esac
+ ;;
+
+ newsos6)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to pass the -shared option too, otherwise
+ # it will coredump.
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+ ;;
+
+ osf3* | osf4* | osf5*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ # All OSF/1 code is PIC.
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+
+ rdos*)
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+ ;;
+
+ solaris*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ case $cc_basename in
+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
+ esac
+ ;;
+
+ sunos4*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ sysv4 | sysv4.2uw2* | sysv4.3*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ sysv4*MP*)
+ if test -d /usr/nec ;then
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ fi
+ ;;
+
+ sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ unicos*)
+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+ ;;
+
+ uts4*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+ ;;
+
+ *)
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+ ;;
+ esac
+ fi
+])
+case $host_os in
+ # For platforms which do not support PIC, -DPIC is meaningless:
+ *djgpp*)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ ;;
+ *)
+ _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
+ ;;
+esac
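+# The extra " -DPIC" appended above (for the C and C++ tags) lets source
+# files detect PIC compilation with "#ifdef PIC" when they need to behave
+# differently in PIC and non-PIC builds.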
+
+AC_CACHE_CHECK([for $compiler option to produce PIC],
+ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)],
+ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+ _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works],
+ [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)],
+ [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [],
+ [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in
+ "" | " "*) ;;
+ *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;;
+ esac],
+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+ _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no])
+fi
+_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
+ [Additional compiler flags for building library objects])
+
+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
+ [How to pass a linker flag through the compiler])
+#
+# Check to make sure the static flag actually works.
+#
+wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\"
+_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works],
+ _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1),
+ $lt_tmp_static_flag,
+ [],
+ [_LT_TAGVAR(lt_prog_compiler_static, $1)=])
+_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
+ [Compiler flag to prevent dynamic linking])
+])# _LT_COMPILER_PIC
+
+
+# _LT_LINKER_SHLIBS([TAGNAME])
+# ----------------------------
+# See if the linker supports building shared libraries.
+m4_defun([_LT_LINKER_SHLIBS],
+[AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+m4_if([$1], [CXX], [
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+ case $host_os in
+ aix[[4-9]]*)
+ # If we're using GNU nm, then we don't want the "-C" option.
+ # -C means demangle to AIX nm, but means don't demangle with GNU nm
+ # Also, AIX nm treats weak defined symbols like other global defined
+ # symbols, whereas GNU nm marks them as "W".
+ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+ else
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+ fi
+ ;;
+ pw32*)
+ _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds"
+ ;;
+ cygwin* | mingw* | cegcc*)
+ case $cc_basename in
+ cl*) ;;
+ *)
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+ ;;
+ esac
+ ;;
+ *)
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+ ;;
+ esac
+], [
+ runpath_var=
+ _LT_TAGVAR(allow_undefined_flag, $1)=
+ _LT_TAGVAR(always_export_symbols, $1)=no
+ _LT_TAGVAR(archive_cmds, $1)=
+ _LT_TAGVAR(archive_expsym_cmds, $1)=
+ _LT_TAGVAR(compiler_needs_object, $1)=no
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+ _LT_TAGVAR(hardcode_automatic, $1)=no
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+ _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=
+ _LT_TAGVAR(hardcode_minus_L, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+ _LT_TAGVAR(inherit_rpath, $1)=no
+ _LT_TAGVAR(link_all_deplibs, $1)=unknown
+ _LT_TAGVAR(module_cmds, $1)=
+ _LT_TAGVAR(module_expsym_cmds, $1)=
+ _LT_TAGVAR(old_archive_from_new_cmds, $1)=
+ _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)=
+ _LT_TAGVAR(thread_safe_flag_spec, $1)=
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ # include_expsyms should be a list of space-separated symbols to be *always*
+ # included in the symbol list
+ _LT_TAGVAR(include_expsyms, $1)=
+ # exclude_expsyms can be an extended regexp of symbols to exclude
+ # it will be wrapped by ` (' and `)$', so one must not match beginning or
+ # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
+ # as well as any symbol that contains `d'.
+ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid C symbol name, most a.out
+ # platforms (ab)use it in PIC code, but their linkers get confused if
+ # the symbol is explicitly referenced. Since portable code cannot
+ # rely on this symbol name, it's probably fine to never include it in
+ # preloaded symbol tables.
+ # Exclude shared library initialization/finalization symbols.
+dnl Note also adjust exclude_expsyms for C++ above.
+ extract_expsyms_cmds=
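+  # Rough guide to the most important variables set per host below:
+  #   archive_cmds              - commands that build the shared library
+  #   archive_expsym_cmds       - the same, but restricted to $export_symbols
+  #   hardcode_libdir_flag_spec - how to embed the run-time search path,
+  #                               e.g. '${wl}-rpath ${wl}$libdir'
+  #   ld_shlibs                 - set to no when shared libraries cannot be built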
+
+ case $host_os in
+ cygwin* | mingw* | pw32* | cegcc*)
+ # FIXME: the MSVC++ port hasn't been tested in a loooong time
+ # When not using gcc, we currently assume that we are using
+ # Microsoft Visual C++.
+ if test "$GCC" != yes; then
+ with_gnu_ld=no
+ fi
+ ;;
+ interix*)
+ # we just hope/assume this is gcc and not c89 (= MSVC++)
+ with_gnu_ld=yes
+ ;;
+ openbsd*)
+ with_gnu_ld=no
+ ;;
+ esac
+
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+
+ # On some targets, GNU ld is compatible enough with the native linker
+ # that we're better off using the native interface for both.
+ lt_use_gnu_ld_interface=no
+ if test "$with_gnu_ld" = yes; then
+ case $host_os in
+ aix*)
+ # The AIX port of GNU ld has always aspired to compatibility
+ # with the native linker. However, as the warning in the GNU ld
+ # block says, versions before 2.19.5* couldn't really create working
+ # shared libraries, regardless of the interface used.
+ case `$LD -v 2>&1` in
+ *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+ *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;;
+ *\ \(GNU\ Binutils\)\ [[3-9]]*) ;;
+ *)
+ lt_use_gnu_ld_interface=yes
+ ;;
+ esac
+ ;;
+ *)
+ lt_use_gnu_ld_interface=yes
+ ;;
+ esac
+ fi
+
+ if test "$lt_use_gnu_ld_interface" = yes; then
+ # If archive_cmds runs LD, not CC, wlarc should be empty
+ wlarc='${wl}'
+
+ # Set some defaults for GNU ld with shared library support. These
+ # are reset later if shared libraries are not supported. Putting them
+ # here allows them to be overridden if necessary.
+ runpath_var=LD_RUN_PATH
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+    # ancient GNU ld didn't support --whole-archive et al.
+ if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+ else
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ fi
+ supports_anon_versioning=no
+ case `$LD -v 2>&1` in
+ *GNU\ gold*) supports_anon_versioning=yes ;;
+ *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11
+ *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+ *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+ *\ 2.11.*) ;; # other 2.11 versions
+ *) supports_anon_versioning=yes ;;
+ esac
+
+ # See if GNU ld supports shared libraries.
+ case $host_os in
+ aix[[3-9]]*)
+ # On AIX/PPC, the GNU linker is very broken
+ if test "$host_cpu" != ia64; then
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support. If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+ fi
+ ;;
+
+ amigaos*)
+ case $host_cpu in
+ powerpc)
+ # see comment about AmigaOS4 .so support
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)=''
+ ;;
+ m68k)
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ ;;
+ esac
+ ;;
+
+ beos*)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+ # support --undefined. This deserves some investigation. FIXME
+ _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+ # as there is no search path for DLLs.
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=no
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
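+      # The sed above appends " DATA" to global data symbols because data
+      # exports in a Windows .def file need the DATA keyword; exclude_expsyms
+      # drops import-library bookkeeping symbols that must never appear in
+      # the export list.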
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+ # If the export-symbols file already is a .def file (1st line
+ # is EXPORTS), use it as is; otherwise, prepend...
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+ cp $export_symbols $output_objdir/$soname.def;
+ else
+ echo EXPORTS > $output_objdir/$soname.def;
+ cat $export_symbols >> $output_objdir/$soname.def;
+ fi~
+ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ haiku*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+
+ interix[[3-9]]*)
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+ # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+ # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow, very memory-
+      # consuming and fragmenting process. To avoid this, we pick a random,
+ # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+ # time. Moving up from 0x10000000 also allows more sbrk(2) space.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+ ;;
+
+ gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+ tmp_diet=no
+ if test "$host_os" = linux-dietlibc; then
+ case $cc_basename in
+ diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn)
+ esac
+ fi
+ if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+ && test "$tmp_diet" = no
+ then
+ tmp_addflag=' $pic_flag'
+ tmp_sharedflag='-shared'
+ case $cc_basename,$host_cpu in
+ pgcc*) # Portland Group C compiler
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+ tmp_addflag=' $pic_flag'
+ ;;
+ pgf77* | pgf90* | pgf95* | pgfortran*)
+ # Portland Group f77 and f90 compilers
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+ tmp_addflag=' $pic_flag -Mnomain' ;;
+ ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64
+ tmp_addflag=' -i_dynamic' ;;
+ efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64
+ tmp_addflag=' -i_dynamic -nofor_main' ;;
+ ifc* | ifort*) # Intel Fortran compiler
+ tmp_addflag=' -nofor_main' ;;
+ lf95*) # Lahey Fortran 8.1
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ tmp_sharedflag='--shared' ;;
+ xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+ tmp_sharedflag='-qmkshrobj'
+ tmp_addflag= ;;
+ nvcc*) # Cuda Compiler Driver 2.2
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+ _LT_TAGVAR(compiler_needs_object, $1)=yes
+ ;;
+ esac
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ C*) # Sun C 5.9
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+ _LT_TAGVAR(compiler_needs_object, $1)=yes
+ tmp_sharedflag='-G' ;;
+ *Sun\ F*) # Sun Fortran 8.3
+ tmp_sharedflag='-G' ;;
+ esac
+ _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+
+ if test "x$supports_anon_versioning" = xyes; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+ fi
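+      # The $output_objdir/$libname.ver generated above is a small GNU ld
+      # version script, roughly "{ global: sym1; sym2; local: *; };" for the
+      # symbols named in $export_symbols, so only those remain visible.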
+
+ case $cc_basename in
+ xlf* | bgf* | bgxlf* | mpixlf*)
+ # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+ _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir'
+ _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ esac
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ netbsd*)
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+ wlarc=
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ fi
+ ;;
+
+ solaris*)
+ if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ cat <<_LT_EOF 1>&2
+
+*** Warning: Releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems. Therefore, libtool
+*** is disabling shared libraries support. We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer. Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+ case `$LD -v 2>&1` in
+ *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*)
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot
+*** reliably create shared libraries on SCO systems. Therefore, libtool
+*** is disabling shared libraries support. We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer. Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+ ;;
+ *)
+ # For security reasons, it is highly recommended that you always
+ # use absolute paths for naming shared libraries, and exclude the
+ # DT_RUNPATH tag from executables and libraries. But doing so
+ # requires that you compile everything twice, which is a pain.
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+
+ sunos4*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+ wlarc=
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ *)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+
+ if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then
+ runpath_var=
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ fi
+ else
+ # PORTME fill in a description of your system's linker (not GNU ld)
+ case $host_os in
+ aix3*)
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+ # Note: this linker hardcodes the directories in LIBPATH if there
+ # are no directories specified by -L.
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
+ # Neither direct hardcoding nor static linking is supported with a
+ # broken collect2.
+ _LT_TAGVAR(hardcode_direct, $1)=unsupported
+ fi
+ ;;
+
+ aix[[4-9]]*)
+ if test "$host_cpu" = ia64; then
+ # On IA64, the linker does run time linking by default, so we don't
+ # have to do anything special.
+ aix_use_runtimelinking=no
+ exp_sym_flag='-Bexport'
+ no_entry_flag=""
+ else
+ # If we're using GNU nm, then we don't want the "-C" option.
+ # -C means demangle to AIX nm, but means don't demangle with GNU nm
+ # Also, AIX nm treats weak defined symbols like other global
+ # defined symbols, whereas GNU nm marks them as "W".
+ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+ else
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+ fi
+ aix_use_runtimelinking=no
+
+ # Test if we are trying to use run time linking or normal
+ # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+ # need to do runtime linking.
+ case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+ for ld_flag in $LDFLAGS; do
+ if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
+ aix_use_runtimelinking=yes
+ break
+ fi
+ done
+ ;;
+ esac
+
+ exp_sym_flag='-bexport'
+ no_entry_flag='-bnoentry'
+ fi
+
+ # When large executables or shared objects are built, AIX ld can
+ # have problems creating the table of contents. If linking a library
+ # or program results in "error TOC overflow" add -mminimal-toc to
+ # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not
+ # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+ _LT_TAGVAR(archive_cmds, $1)=''
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+ if test "$GCC" = yes; then
+ case $host_os in aix4.[[012]]|aix4.[[012]].*)
+ # We only want to do this on AIX 4.2 and lower, the check
+ # below for broken collect2 doesn't work under 4.3+
+ collect2name=`${CC} -print-prog-name=collect2`
+ if test -f "$collect2name" &&
+ strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+ then
+ # We have reworked collect2
+ :
+ else
+ # We have old collect2
+ _LT_TAGVAR(hardcode_direct, $1)=unsupported
+ # It fails to find uninstalled libraries when the uninstalled
+ # path is not listed in the libpath. Setting hardcode_minus_L
+ # to unsupported forces relinking
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=
+ fi
+ ;;
+ esac
+ shared_flag='-shared'
+ if test "$aix_use_runtimelinking" = yes; then
+ shared_flag="$shared_flag "'${wl}-G'
+ fi
+ else
+ # not using gcc
+ if test "$host_cpu" = ia64; then
+ # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+ # chokes on -Wl,-G. The following line is correct:
+ shared_flag='-G'
+ else
+ if test "$aix_use_runtimelinking" = yes; then
+ shared_flag='${wl}-G'
+ else
+ shared_flag='${wl}-bM:SRE'
+ fi
+ fi
+ fi
+
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+ # It seems that -bexpall does not export symbols beginning with
+ # underscore (_), so it is better to generate a list of symbols to export.
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ if test "$aix_use_runtimelinking" = yes; then
+ # Warning - without using the other runtime loading flags (-brtl),
+ # -berok will link without error, but may produce a broken library.
+ _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+ else
+ if test "$host_cpu" = ia64; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+ _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+ # -berok will link without error, but may produce a broken library.
+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+ if test "$with_gnu_ld" = yes; then
+ # We only use this code for GNU lds that support --whole-archive.
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+ else
+ # Exported symbols can be pulled into shared objects from archives
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+ # This is similar to how AIX traditionally builds its shared libraries.
+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+ fi
+ fi
+ ;;
+
+ amigaos*)
+ case $host_cpu in
+ powerpc)
+ # see comment about AmigaOS4 .so support
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)=''
+ ;;
+ m68k)
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ ;;
+ esac
+ ;;
+
+ bsdi[[45]]*)
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+ # When not using gcc, we currently assume that we are using
+ # Microsoft Visual C++.
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+ case $cc_basename in
+ cl*)
+ # Native MSVC
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ _LT_TAGVAR(file_list_spec, $1)='@'
+ # Tell ltmain to make .lib files, not .a files.
+ libext=lib
+ # Tell ltmain to make .dll files, not .so files.
+ shrext_cmds=".dll"
+ # FIXME: Setting linknames here is a bad hack.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+ else
+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+ fi~
+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+ linknames='
+ # The linker will not automatically build a static lib if we build a DLL.
+ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
+ # Don't use ranlib
+ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+ lt_tool_outputfile="@TOOL_OUTPUT@"~
+ case $lt_outputfile in
+ *.exe|*.EXE) ;;
+ *)
+ lt_outputfile="$lt_outputfile.exe"
+ lt_tool_outputfile="$lt_tool_outputfile.exe"
+ ;;
+ esac~
+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+ $RM "$lt_outputfile.manifest";
+ fi'
+ ;;
+ *)
+ # Assume MSVC wrapper
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ # Tell ltmain to make .lib files, not .a files.
+ libext=lib
+ # Tell ltmain to make .dll files, not .so files.
+ shrext_cmds=".dll"
+ # FIXME: Setting linknames here is a bad hack.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+ # The linker will automatically build a .lib file if we build a DLL.
+ _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+ # FIXME: Should let the user specify the lib program.
+ _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ ;;
+ esac
+ ;;
+
+ darwin* | rhapsody*)
+ _LT_DARWIN_LINKER_FEATURES($1)
+ ;;
+
+ dgux*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ freebsd1*)
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+ # support. Future versions do this automatically, but an explicit c++rt0.o
+ # does not break anything, and helps significantly (at the cost of a little
+ # extra space).
+ freebsd2.2*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+ freebsd2*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+ freebsd* | dragonfly*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ hpux9*)
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ fi
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+
+ # hardcode_minus_L: Not really in the search PATH,
+ # but as the default location of the library.
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+ ;;
+
+ hpux10*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+ fi
+ if test "$with_gnu_ld" = no; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='+b $libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+ # hardcode_minus_L: Not really in the search PATH,
+ # but as the default location of the library.
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ fi
+ ;;
+
+ hpux11*)
+ if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+ case $host_cpu in
+ hppa*64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ else
+ case $host_cpu in
+ hppa*64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ ia64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+ m4_if($1, [], [
+ # Older versions of the 11.00 compiler do not understand -b yet
+ # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+ _LT_LINKER_OPTION([if $CC understands -b],
+ _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b],
+ [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'],
+ [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])],
+ [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'])
+ ;;
+ esac
+ fi
+ if test "$with_gnu_ld" = no; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ case $host_cpu in
+ hppa*64*|ia64*)
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+ *)
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+
+ # hardcode_minus_L: Not really in the search PATH,
+ # but as the default location of the library.
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ ;;
+ esac
+ fi
+ ;;
+
+ irix5* | irix6* | nonstopux*)
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+      # Try to use the -exported_symbol ld option; if it does not
+ # work, assume that -exports_file does not work either and
+ # implicitly export all symbols.
+ # This should be the same for all languages, so no per-tag cache variable.
+ AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol],
+ [lt_cv_irix_exported_symbol],
+ [save_LDFLAGS="$LDFLAGS"
+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+ AC_LINK_IFELSE(
+ [AC_LANG_SOURCE(
+ [AC_LANG_CASE([C], [[int foo (void) { return 0; }]],
+ [C++], [[int foo (void) { return 0; }]],
+ [Fortran 77], [[
+ subroutine foo
+ end]],
+ [Fortran], [[
+ subroutine foo
+ end]])])],
+ [lt_cv_irix_exported_symbol=yes],
+ [lt_cv_irix_exported_symbol=no])
+ LDFLAGS="$save_LDFLAGS"])
+ if test "$lt_cv_irix_exported_symbol" = yes; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+ fi
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(inherit_rpath, $1)=yes
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+
+ netbsd*)
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF
+ fi
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ newsos6)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ *nto* | *qnx*)
+ ;;
+
+ openbsd*)
+ if test -f /usr/libexec/ld.so; then
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+ else
+ case $host_os in
+ openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+ ;;
+ esac
+ fi
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ os2*)
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
+ _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
+ ;;
+
+ osf3*)
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ else
+ _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ ;;
+
+ osf4* | osf5*) # as osf3* with the addition of -msym flag
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ else
+ _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
+
+      # Both the C and C++ compilers support -rpath directly
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ ;;
+
+ solaris*)
+ _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
+ if test "$GCC" = yes; then
+ wlarc='${wl}'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ else
+ case `$CC -V 2>&1` in
+ *"Compilers 5.0"*)
+ wlarc=''
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+ ;;
+ *)
+ wlarc='${wl}'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+ ;;
+ esac
+ fi
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ case $host_os in
+ solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+ *)
+ # The compiler driver will combine and reorder linker options,
+ # but understands `-z linker_flag'. GCC discards it without `$wl',
+ # but is careful enough not to reorder.
+ # Supported since Solaris 2.6 (maybe 2.5.1?)
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+ else
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+ fi
+ ;;
+ esac
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+
+ sunos4*)
+ if test "x$host_vendor" = xsequent; then
+ # Use $CC to link under sequent, because it throws in some extra .o
+ # files that make .init and .fini sections work.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+ fi
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ sysv4)
+ case $host_vendor in
+ sni)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true???
+ ;;
+ siemens)
+        ## LD is ld; it makes a PLAMLIB
+ ## CC just makes a GrossModule.
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs'
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ ;;
+ motorola)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+        _LT_TAGVAR(hardcode_direct, $1)=no # Motorola manual says yes, but my tests say they lie
+ ;;
+ esac
+ runpath_var='LD_RUN_PATH'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ sysv4.3*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport'
+ ;;
+
+ sysv4*MP*)
+ if test -d /usr/nec; then
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ runpath_var=LD_RUN_PATH
+ hardcode_runpath_var=yes
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ fi
+ ;;
+
+ sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ runpath_var='LD_RUN_PATH'
+
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ fi
+ ;;
+
+ sysv5* | sco3.2v5* | sco5v6*)
+ # Note: We can NOT use -z defs as we might desire, because we do not
+ # link with -lc, and that would cause any symbols used from libc to
+ # always be unresolved, which means just about no library would
+ # ever link correctly. If we're not using GNU ld we use -z text
+ # though, which does catch some bad symbols but isn't as heavy-handed
+ # as -z defs.
+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+ _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+ runpath_var='LD_RUN_PATH'
+
+ if test "$GCC" = yes; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ fi
+ ;;
+
+ uts4*)
+ _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+
+ *)
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+
+ if test x$host_vendor = xsni; then
+ case $host in
+ sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym'
+ ;;
+ esac
+ fi
+ fi
+])
+AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld
+
+_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl
+_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl
+_LT_DECL([], [extract_expsyms_cmds], [2],
+ [The commands to extract the exported symbol list from a shared archive])
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in
+x|xyes)
+ # Assume -lc should be added
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+
+ if test "$enable_shared" = yes && test "$GCC" = yes; then
+ case $_LT_TAGVAR(archive_cmds, $1) in
+ *'~'*)
+ # FIXME: we may have to deal with multi-command sequences.
+ ;;
+ '$CC '*)
+ # Test whether the compiler implicitly links with -lc since on some
+ # systems, -lgcc has to come before -lc. If gcc already passes -lc
+ # to ld, don't add -lc before -lgcc.
+ AC_CACHE_CHECK([whether -lc should be explicitly linked in],
+ [lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1),
+ [$RM conftest*
+ echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+ if AC_TRY_EVAL(ac_compile) 2>conftest.err; then
+ soname=conftest
+ lib=conftest
+ libobjs=conftest.$ac_objext
+ deplibs=
+ wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1)
+ pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1)
+ compiler_flags=-v
+ linker_flags=-v
+ verstring=
+ output_objdir=.
+ libname=conftest
+ lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1)
+ _LT_TAGVAR(allow_undefined_flag, $1)=
+ if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1)
+ then
+ lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ else
+ lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+ fi
+ _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag
+ else
+ cat conftest.err 1>&5
+ fi
+ $RM conftest*
+ ])
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)
+ ;;
+ esac
+ fi
+ ;;
+esac
+
+_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0],
+ [Whether or not to add -lc for building shared libraries])
+_LT_TAGDECL([allow_libtool_libs_with_static_runtimes],
+ [enable_shared_with_static_runtimes], [0],
+ [Whether or not to disallow shared libs when runtime libs are static])
+_LT_TAGDECL([], [export_dynamic_flag_spec], [1],
+ [Compiler flag to allow reflexive dlopens])
+_LT_TAGDECL([], [whole_archive_flag_spec], [1],
+ [Compiler flag to generate shared objects directly from archives])
+_LT_TAGDECL([], [compiler_needs_object], [1],
+ [Whether the compiler copes with passing no objects directly])
+_LT_TAGDECL([], [old_archive_from_new_cmds], [2],
+ [Create an old-style archive from a shared archive])
+_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2],
+ [Create a temporary old-style archive to link instead of a shared archive])
+_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive])
+_LT_TAGDECL([], [archive_expsym_cmds], [2])
+_LT_TAGDECL([], [module_cmds], [2],
+ [Commands used to build a loadable module if different from building
+ a shared archive.])
+_LT_TAGDECL([], [module_expsym_cmds], [2])
+_LT_TAGDECL([], [with_gnu_ld], [1],
+ [Whether we are building with GNU ld or not])
+_LT_TAGDECL([], [allow_undefined_flag], [1],
+ [Flag that allows shared libraries with undefined symbols to be built])
+_LT_TAGDECL([], [no_undefined_flag], [1],
+ [Flag that enforces no undefined symbols])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1],
+ [Flag to hardcode $libdir into a binary during linking.
+ This must work even if $libdir does not exist])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec_ld], [1],
+ [[If ld is used when linking, flag to hardcode $libdir into a binary
+ during linking. This must work even if $libdir does not exist]])
+_LT_TAGDECL([], [hardcode_libdir_separator], [1],
+ [Whether we need a single "-rpath" flag with a separated argument])
+_LT_TAGDECL([], [hardcode_direct], [0],
+ [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+ DIR into the resulting binary])
+_LT_TAGDECL([], [hardcode_direct_absolute], [0],
+ [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+ DIR into the resulting binary and the resulting library dependency is
+ "absolute", i.e impossible to change by setting ${shlibpath_var} if the
+ library is relocated])
+_LT_TAGDECL([], [hardcode_minus_L], [0],
+ [Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+ into the resulting binary])
+_LT_TAGDECL([], [hardcode_shlibpath_var], [0],
+ [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+ into the resulting binary])
+_LT_TAGDECL([], [hardcode_automatic], [0],
+ [Set to "yes" if building a shared library automatically hardcodes DIR
+ into the library and all subsequent libraries and executables linked
+ against it])
+_LT_TAGDECL([], [inherit_rpath], [0],
+ [Set to yes if linker adds runtime paths of dependent libraries
+ to runtime path list])
+_LT_TAGDECL([], [link_all_deplibs], [0],
+ [Whether libtool must link a program against all its dependency libraries])
+_LT_TAGDECL([], [always_export_symbols], [0],
+ [Set to "yes" if exported symbols are required])
+_LT_TAGDECL([], [export_symbols_cmds], [2],
+ [The commands to list exported symbols])
+_LT_TAGDECL([], [exclude_expsyms], [1],
+ [Symbols that should not be listed in the preloaded symbols])
+_LT_TAGDECL([], [include_expsyms], [1],
+ [Symbols that must always be exported])
+_LT_TAGDECL([], [prelink_cmds], [2],
+ [Commands necessary for linking programs (against libraries) with templates])
+_LT_TAGDECL([], [postlink_cmds], [2],
+ [Commands necessary for finishing linking programs])
+_LT_TAGDECL([], [file_list_spec], [1],
+ [Specify filename containing input files])
+dnl FIXME: Not yet implemented
+dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
+dnl [Compiler flag to generate thread safe objects])
+])# _LT_LINKER_SHLIBS
+
+
+# _LT_LANG_C_CONFIG([TAG])
+# ------------------------
+# Ensure that the configuration variables for a C compiler are suitably
+# defined. These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_C_CONFIG],
+[m4_require([_LT_DECL_EGREP])dnl
+lt_save_CC="$CC"
+AC_LANG_PUSH(C)
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+_LT_TAG_COMPILER
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros; do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+ _LT_COMPILER_NO_RTTI($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_SYS_DYNAMIC_LINKER($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+ LT_SYS_DLOPEN_SELF
+ _LT_CMD_STRIPLIB
+
+ # Report which library types will actually be built
+ AC_MSG_CHECKING([if libtool supports shared libraries])
+ AC_MSG_RESULT([$can_build_shared])
+
+ AC_MSG_CHECKING([whether to build shared libraries])
+ test "$can_build_shared" = "no" && enable_shared=no
+
+ # On AIX, shared libraries and static libraries use the same namespace, and
+ # are all built from PIC.
+ case $host_os in
+ aix3*)
+ test "$enable_shared" = yes && enable_static=no
+ if test -n "$RANLIB"; then
+ archive_cmds="$archive_cmds~\$RANLIB \$lib"
+ postinstall_cmds='$RANLIB $lib'
+ fi
+ ;;
+
+ aix[[4-9]]*)
+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+ test "$enable_shared" = yes && enable_static=no
+ fi
+ ;;
+ esac
+ AC_MSG_RESULT([$enable_shared])
+
+ AC_MSG_CHECKING([whether to build static libraries])
+ # Make sure either enable_shared or enable_static is yes.
+ test "$enable_shared" = yes || enable_static=yes
+ AC_MSG_RESULT([$enable_static])
+
+ _LT_CONFIG($1)
+fi
+AC_LANG_POP
+CC="$lt_save_CC"
+])# _LT_LANG_C_CONFIG
+
+
+# _LT_LANG_CXX_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a C++ compiler are suitably
+# defined. These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_CXX_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
+ ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
+ (test "X$CXX" != "Xg++"))) ; then
+ AC_PROG_CXXCPP
+else
+ _lt_caught_CXX_error=yes
+fi
+
+AC_LANG_PUSH(C++)
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(compiler_needs_object, $1)=no
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for C++ test sources.
+ac_ext=cpp
+
+# Object file extension for compiled C++ test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the CXX compiler isn't working. Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_caught_CXX_error" != yes; then
+ # Code to be used in simple compile tests
+ lt_simple_compile_test_code="int some_variable = 0;"
+
+ # Code to be used in simple link tests
+ lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }'
+
+ # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+ _LT_TAG_COMPILER
+
+ # save warnings/boilerplate of simple test code
+ _LT_COMPILER_BOILERPLATE
+ _LT_LINKER_BOILERPLATE
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC=$CC
+ lt_save_CFLAGS=$CFLAGS
+ lt_save_LD=$LD
+ lt_save_GCC=$GCC
+ GCC=$GXX
+ lt_save_with_gnu_ld=$with_gnu_ld
+ lt_save_path_LD=$lt_cv_path_LD
+ if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
+ lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
+ else
+ $as_unset lt_cv_prog_gnu_ld
+ fi
+ if test -n "${lt_cv_path_LDCXX+set}"; then
+ lt_cv_path_LD=$lt_cv_path_LDCXX
+ else
+ $as_unset lt_cv_path_LD
+ fi
+ test -z "${LDCXX+set}" || LD=$LDCXX
+ CC=${CXX-"c++"}
+ CFLAGS=$CXXFLAGS
+ compiler=$CC
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_CC_BASENAME([$compiler])
+
+ if test -n "$compiler"; then
+ # We don't want -fno-exception when compiling C++ code, so set the
+ # no_builtin_flag separately
+ if test "$GXX" = yes; then
+ _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
+ else
+ _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+ fi
+
+ if test "$GXX" = yes; then
+ # Set up default GNU C++ configuration
+
+ LT_PATH_LD
+
+ # Check if GNU C++ uses GNU ld as the underlying linker, since the
+ # archiving commands below assume that GNU ld is being used.
+ if test "$with_gnu_ld" = yes; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+ # If archive_cmds runs LD, not CC, wlarc should be empty
+ # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
+ # investigate it a little bit more. (MM)
+ wlarc='${wl}'
+
+      # ancient GNU ld didn't support --whole-archive et al.
+ if eval "`$CC -print-prog-name=ld` --help 2>&1" |
+ $GREP 'no-whole-archive' > /dev/null; then
+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+ else
+ _LT_TAGVAR(whole_archive_flag_spec, $1)=
+ fi
+ else
+ with_gnu_ld=no
+ wlarc=
+
+ # A generic and very simple default shared library creation
+ # command for GNU C++ for the case where it uses the native
+      # linker, instead of GNU ld. If possible, this setting should be
+      # overridden to take advantage of the native linker features on
+ # the platform it is being used on.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+ fi
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+ else
+ GXX=no
+ with_gnu_ld=no
+ wlarc=
+ fi
+
+ # PORTME: fill in a description of your system's C++ link characteristics
+ AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ case $host_os in
+ aix3*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ aix[[4-9]]*)
+ if test "$host_cpu" = ia64; then
+ # On IA64, the linker does run time linking by default, so we don't
+ # have to do anything special.
+ aix_use_runtimelinking=no
+ exp_sym_flag='-Bexport'
+ no_entry_flag=""
+ else
+ aix_use_runtimelinking=no
+
+ # Test if we are trying to use run time linking or normal
+ # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+ # need to do runtime linking.
+ case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+ for ld_flag in $LDFLAGS; do
+ case $ld_flag in
+ *-brtl*)
+ aix_use_runtimelinking=yes
+ break
+ ;;
+ esac
+ done
+ ;;
+ esac
+
+ exp_sym_flag='-bexport'
+ no_entry_flag='-bnoentry'
+ fi
+
+ # When large executables or shared objects are built, AIX ld can
+ # have problems creating the table of contents. If linking a library
+ # or program results in "error TOC overflow" add -mminimal-toc to
+ # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not
+ # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+ _LT_TAGVAR(archive_cmds, $1)=''
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+ if test "$GXX" = yes; then
+ case $host_os in aix4.[[012]]|aix4.[[012]].*)
+          # We only want to do this on AIX 4.2 and lower; the check
+ # below for broken collect2 doesn't work under 4.3+
+ collect2name=`${CC} -print-prog-name=collect2`
+ if test -f "$collect2name" &&
+ strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+ then
+ # We have reworked collect2
+ :
+ else
+ # We have old collect2
+ _LT_TAGVAR(hardcode_direct, $1)=unsupported
+ # It fails to find uninstalled libraries when the uninstalled
+ # path is not listed in the libpath. Setting hardcode_minus_L
+ # to unsupported forces relinking
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=
+ fi
+ esac
+ shared_flag='-shared'
+ if test "$aix_use_runtimelinking" = yes; then
+ shared_flag="$shared_flag "'${wl}-G'
+ fi
+ else
+ # not using gcc
+ if test "$host_cpu" = ia64; then
+ # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+ # chokes on -Wl,-G. The following line is correct:
+ shared_flag='-G'
+ else
+ if test "$aix_use_runtimelinking" = yes; then
+ shared_flag='${wl}-G'
+ else
+ shared_flag='${wl}-bM:SRE'
+ fi
+ fi
+ fi
+
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+ # It seems that -bexpall does not export symbols beginning with
+ # underscore (_), so it is better to generate a list of symbols to
+ # export.
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ if test "$aix_use_runtimelinking" = yes; then
+ # Warning - without using the other runtime loading flags (-brtl),
+ # -berok will link without error, but may produce a broken library.
+ _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+ # Determine the default libpath from the value encoded in an empty
+ # executable.
+ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+ else
+ if test "$host_cpu" = ia64; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+ _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+ else
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+ _LT_SYS_MODULE_PATH_AIX([$1])
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+ # Warning - without using the other run time loading flags,
+ # -berok will link without error, but may produce a broken library.
+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+ if test "$with_gnu_ld" = yes; then
+ # We only use this code for GNU lds that support --whole-archive.
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+ else
+ # Exported symbols can be pulled into shared objects from archives
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+ fi
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+ # This is similar to how AIX traditionally builds its shared
+ # libraries.
+ _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+ fi
+ fi
+ ;;
+
+ beos*)
+ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ # Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+ # support --undefined. This deserves some investigation. FIXME
+ _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ chorus*)
+ case $cc_basename in
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ cygwin* | mingw* | pw32* | cegcc*)
+ case $GXX,$cc_basename in
+ ,cl* | no,cl*)
+ # Native MSVC
+ # hardcode_libdir_flag_spec is actually meaningless, as there is
+ # no search path for DLLs.
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=yes
+ _LT_TAGVAR(file_list_spec, $1)='@'
+ # Tell ltmain to make .lib files, not .a files.
+ libext=lib
+ # Tell ltmain to make .dll files, not .so files.
+ shrext_cmds=".dll"
+ # FIXME: Setting linknames here is a bad hack.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
+ else
+ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
+ fi~
+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+ linknames='
+ # The linker will not automatically build a static lib if we build a DLL.
+ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+ # Don't use ranlib
+ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+ lt_tool_outputfile="@TOOL_OUTPUT@"~
+ case $lt_outputfile in
+ *.exe|*.EXE) ;;
+ *)
+ lt_outputfile="$lt_outputfile.exe"
+ lt_tool_outputfile="$lt_tool_outputfile.exe"
+ ;;
+ esac~
+ func_to_tool_file "$lt_outputfile"~
+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+ $RM "$lt_outputfile.manifest";
+ fi'
+ ;;
+ *)
+ # g++
+ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+ # as there is no search path for DLLs.
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+ _LT_TAGVAR(always_export_symbols, $1)=no
+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+
+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+ # If the export-symbols file already is a .def file (1st line
+ # is EXPORTS), use it as is; otherwise, prepend...
+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+ cp $export_symbols $output_objdir/$soname.def;
+ else
+ echo EXPORTS > $output_objdir/$soname.def;
+ cat $export_symbols >> $output_objdir/$soname.def;
+ fi~
+ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+ darwin* | rhapsody*)
+ _LT_DARWIN_LINKER_FEATURES($1)
+ ;;
+
+ dgux*)
+ case $cc_basename in
+ ec++*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ ghcx*)
+ # Green Hills C++ Compiler
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ freebsd[[12]]*)
+    # C++ shared libraries were reported to be fairly broken before
+    # the switch to ELF
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ freebsd-elf*)
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ ;;
+
+ freebsd* | dragonfly*)
+ # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
+ # conventions
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ ;;
+
+ gnu*)
+ ;;
+
+ haiku*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+
+ hpux9*)
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+ # but as the default
+ # location of the library.
+
+ case $cc_basename in
+ CC*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ aCC*)
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+ ;;
+ *)
+ if test "$GXX" = yes; then
+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+ else
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+
+ hpux10*|hpux11*)
+ if test $with_gnu_ld = no; then
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ case $host_cpu in
+ hppa*64*|ia64*)
+ ;;
+ *)
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+ ;;
+ esac
+ fi
+ case $host_cpu in
+ hppa*64*|ia64*)
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ ;;
+ *)
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+ # but as the default
+ # location of the library.
+ ;;
+ esac
+
+ case $cc_basename in
+ CC*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ aCC*)
+ case $host_cpu in
+ hppa*64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ ia64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ esac
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+ ;;
+ *)
+ if test "$GXX" = yes; then
+ if test $with_gnu_ld = no; then
+ case $host_cpu in
+ hppa*64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ ia64*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ ;;
+ esac
+ fi
+ else
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+
+ interix[[3-9]]*)
+ _LT_TAGVAR(hardcode_direct, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+ # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+ # Instead, shared libraries are loaded at an image base (0x10000000 by
+    # default) and relocated if they conflict, which is a slow, very
+    # memory-consuming and fragmenting process. To avoid this, we pick a random,
+ # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+ # time. Moving up from 0x10000000 also allows more sbrk(2) space.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+ ;;
+ irix5* | irix6*)
+ case $cc_basename in
+ CC*)
+ # SGI C++
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+
+ # Archives containing C++ object files must be created using
+ # "CC -ar", where "CC" is the IRIX C++ compiler. This is
+ # necessary to make sure instantiated templates are included
+ # in the archive.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs'
+ ;;
+ *)
+ if test "$GXX" = yes; then
+ if test "$with_gnu_ld" = no; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ else
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+ fi
+ fi
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ ;;
+ esac
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+ _LT_TAGVAR(inherit_rpath, $1)=yes
+ ;;
+
+ linux* | k*bsd*-gnu | kopensolaris*-gnu)
+ case $cc_basename in
+ KCC*)
+ # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+ # KCC will only create a shared library if the output file
+ # ends with ".so" (or ".sl" for HP-UX), so rename the library
+ # to its proper name (with version) after linking.
+ _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib'
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+ # Archives containing C++ object files must be created using
+ # "CC -Bstatic", where "CC" is the KAI C++ compiler.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs'
+ ;;
+ icpc* | ecpc* )
+ # Intel C++
+ with_gnu_ld=yes
+        # Version 8.0 and above of icpc choke on multiply defined symbols
+        # if we add $predep_objects and $postdep_objects; however, 7.1 and
+        # earlier do not add the objects themselves.
+ case `$CC -V 2>&1` in
+ *"Version 7."*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ ;;
+ *) # Version 8.0 or newer
+ tmp_idyn=
+ case $host_cpu in
+ ia64*) tmp_idyn=' -i_dynamic';;
+ esac
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+ ;;
+ esac
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+ ;;
+ pgCC* | pgcpp*)
+ # Portland Group C++ compiler
+ case `$CC -V` in
+ *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*)
+ _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
+ rm -rf $tpldir~
+ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
+ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"'
+ _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
+ rm -rf $tpldir~
+ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
+ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~
+ $RANLIB $oldlib'
+ _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
+ rm -rf $tpldir~
+ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
+ rm -rf $tpldir~
+ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+ ;;
+ *) # Version 6 and above use weak symbols
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+ ;;
+ esac
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+ ;;
+ cxx*)
+ # Compaq C++
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols'
+
+ runpath_var=LD_RUN_PATH
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed'
+ ;;
+ xl* | mpixl* | bgxl*)
+ # IBM XL 8.0 on PPC, with GNU ld
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+ if test "x$supports_anon_versioning" = xyes; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+ echo "local: *; };" >> $output_objdir/$libname.ver~
+ $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+ fi
+ ;;
+ *)
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ C*)
+ # Sun C++ 5.9
+ _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+ _LT_TAGVAR(compiler_needs_object, $1)=yes
+
+ # Not sure whether something based on
+ # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
+ # would be better.
+ output_verbose_link_cmd='func_echo_all'
+
+ # Archives containing C++ object files must be created using
+ # "CC -xar", where "CC" is the Sun C++ compiler. This is
+ # necessary to make sure instantiated templates are included
+ # in the archive.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+ ;;
+ esac
+ ;;
+ esac
+ ;;
+
+ lynxos*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ m88k*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ mvs*)
+ case $cc_basename in
+ cxx*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ netbsd*)
+ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
+ wlarc=
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ fi
+    # Work around some broken pre-1.5 toolchains
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
+ ;;
+
+ *nto* | *qnx*)
+ _LT_TAGVAR(ld_shlibs, $1)=yes
+ ;;
+
+ openbsd2*)
+ # C++ shared libraries are fairly broken
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ openbsd*)
+ if test -f /usr/libexec/ld.so; then
+ _LT_TAGVAR(hardcode_direct, $1)=yes
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+ if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib'
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+ _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+ fi
+ output_verbose_link_cmd=func_echo_all
+ else
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+
+ osf3* | osf4* | osf5*)
+ case $cc_basename in
+ KCC*)
+ # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+ # KCC will only create a shared library if the output file
+ # ends with ".so" (or ".sl" for HP-UX), so rename the library
+ # to its proper name (with version) after linking.
+ _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ # Archives containing C++ object files must be created using
+ # the KAI C++ compiler.
+ case $host in
+ osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;;
+ *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;;
+ esac
+ ;;
+ RCC*)
+ # Rational C++ 2.4.1
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ cxx*)
+ case $host in
+ osf3*)
+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ ;;
+ *)
+ _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
+ echo "-hidden">> $lib.exp~
+ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~
+ $RM $lib.exp'
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+ ;;
+ esac
+
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ #
+ # There doesn't appear to be a way to prevent this compiler from
+ # explicitly linking system object files so we need to strip them
+ # from the output so that they don't get included in the library
+ # dependencies.
+ output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+ ;;
+ *)
+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+ _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+ case $host in
+ osf3*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+ ;;
+ esac
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+ else
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ fi
+ ;;
+ esac
+ ;;
+
+ psos*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ sunos4*)
+ case $cc_basename in
+ CC*)
+ # Sun C++ 4.x
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ lcc*)
+ # Lucid
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ solaris*)
+ case $cc_basename in
+ CC* | sunCC*)
+ # Sun C++ 4.2, 5.x and Centerline C++
+ _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
+ _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ case $host_os in
+ solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+ *)
+ # The compiler driver will combine and reorder linker options,
+ # but understands `-z linker_flag'.
+ # Supported since Solaris 2.6 (maybe 2.5.1?)
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+ ;;
+ esac
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+
+ output_verbose_link_cmd='func_echo_all'
+
+ # Archives containing C++ object files must be created using
+ # "CC -xar", where "CC" is the Sun C++ compiler. This is
+ # necessary to make sure instantiated templates are included
+ # in the archive.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+ ;;
+ gcx*)
+ # Green Hills C++ Compiler
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+
+ # The C++ compiler must be used to create the archive.
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs'
+ ;;
+ *)
+ # GNU C++ compiler with Solaris linker
+ if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+ _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs'
+ if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+ else
+ # g++ 2.7 appears to require `-G' NOT `-shared' on this
+ # platform.
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+ $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+ # Commands to make compiler produce verbose output that lists
+ # what "hidden" libraries, object files and flags are used when
+ # linking a shared library.
+ output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+ fi
+
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir'
+ case $host_os in
+ solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+ *)
+ _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+ ;;
+ esac
+ fi
+ ;;
+ esac
+ ;;
+
+ sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ runpath_var='LD_RUN_PATH'
+
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ ;;
+
+ sysv5* | sco3.2v5* | sco5v6*)
+ # Note: We can NOT use -z defs as we might desire, because we do not
+ # link with -lc, and that would cause any symbols used from libc to
+ # always be unresolved, which means just about no library would
+ # ever link correctly. If we're not using GNU ld we use -z text
+ # though, which does catch some bad symbols but isn't as heavy-handed
+ # as -z defs.
+ _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+ _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+ _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+ _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+ _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+ _LT_TAGVAR(link_all_deplibs, $1)=yes
+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+ runpath_var='LD_RUN_PATH'
+
+ case $cc_basename in
+ CC*)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~
+ '"$_LT_TAGVAR(old_archive_cmds, $1)"
+ _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~
+ '"$_LT_TAGVAR(reload_cmds, $1)"
+ ;;
+ *)
+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+ ;;
+ esac
+ ;;
+
+ tandem*)
+ case $cc_basename in
+ NCC*)
+ # NonStop-UX NCC 3.20
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+ ;;
+
+ vxworks*)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+
+ *)
+ # FIXME: insert proper C++ library support
+ _LT_TAGVAR(ld_shlibs, $1)=no
+ ;;
+ esac
+
+ AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+ test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+ _LT_TAGVAR(GCC, $1)="$GXX"
+ _LT_TAGVAR(LD, $1)="$LD"
+
+ ## CAVEAT EMPTOR:
+ ## There is no encapsulation within the following macros, do not change
+ ## the running order or otherwise move them around unless you know exactly
+ ## what you are doing...
+ _LT_SYS_HIDDEN_LIBDEPS($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_SYS_DYNAMIC_LINKER($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+
+ _LT_CONFIG($1)
+ fi # test -n "$compiler"
+
+ CC=$lt_save_CC
+ CFLAGS=$lt_save_CFLAGS
+ LDCXX=$LD
+ LD=$lt_save_LD
+ GCC=$lt_save_GCC
+ with_gnu_ld=$lt_save_with_gnu_ld
+ lt_cv_path_LDCXX=$lt_cv_path_LD
+ lt_cv_path_LD=$lt_save_path_LD
+ lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
+ lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
+fi # test "$_lt_caught_CXX_error" != yes
+
+AC_LANG_POP
+])# _LT_LANG_CXX_CONFIG
+
+
+# _LT_FUNC_STRIPNAME_CNF
+# ----------------------
+# func_stripname_cnf prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+#
+# This function is identical to the (non-XSI) version of func_stripname,
+# except this one can be used by m4 code that may be executed by configure,
+# rather than the libtool script.
+m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
+AC_REQUIRE([_LT_DECL_SED])
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
+func_stripname_cnf ()
+{
+ case ${2} in
+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+ esac
+} # func_stripname_cnf
+])# _LT_FUNC_STRIPNAME_CNF
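+
+# Illustrative examples (not part of upstream libtool): with the definition
+# above,
+#   func_stripname_cnf 'lib' '.la' 'libltdl.la'   # -> func_stripname_result=ltdl
+#   func_stripname_cnf '' '.lo' 'foo.lo'          # -> func_stripname_result=foo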
+
+# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
+# ---------------------------------
+# Figure out "hidden" library dependencies from verbose
+# compiler output when linking a shared library.
+# Parse the compiler output and extract the necessary
+# objects, libraries and library flags.
+m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
+# Dependencies to place before and after the object being linked:
+_LT_TAGVAR(predep_objects, $1)=
+_LT_TAGVAR(postdep_objects, $1)=
+_LT_TAGVAR(predeps, $1)=
+_LT_TAGVAR(postdeps, $1)=
+_LT_TAGVAR(compiler_lib_search_path, $1)=
+
+dnl we can't use the lt_simple_compile_test_code here,
+dnl because it contains code intended for an executable,
+dnl not a library. It's possible we should let each
+dnl tag define a new lt_????_link_test_code variable,
+dnl but it's only used here...
+m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
+int a;
+void foo (void) { a = 0; }
+_LT_EOF
+], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
+class Foo
+{
+public:
+ Foo (void) { a = 0; }
+private:
+ int a;
+};
+_LT_EOF
+], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
+ subroutine foo
+ implicit none
+ integer*4 a
+ a=0
+ return
+ end
+_LT_EOF
+], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
+ subroutine foo
+ implicit none
+ integer a
+ a=0
+ return
+ end
+_LT_EOF
+], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
+public class foo {
+ private int a;
+ public void bar (void) {
+ a = 0;
+ }
+};
+_LT_EOF
+])
+
+_lt_libdeps_save_CFLAGS=$CFLAGS
+case "$CC $CFLAGS " in #(
+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
+esac
+
+dnl Parse the compiler output and extract the necessary
+dnl objects, libraries and library flags.
+if AC_TRY_EVAL(ac_compile); then
+ # Parse the compiler output and extract the necessary
+ # objects, libraries and library flags.
+
+ # Sentinel used to keep track of whether or not we are before
+ # the conftest object file.
+ pre_test_object_deps_done=no
+
+ for p in `eval "$output_verbose_link_cmd"`; do
+ case ${prev}${p} in
+
+ -L* | -R* | -l*)
+ # Some compilers place space between "-{L,R}" and the path.
+ # Remove the space.
+ if test $p = "-L" ||
+ test $p = "-R"; then
+ prev=$p
+ continue
+ fi
+
+ # Expand the sysroot to ease extracting the directories later.
+ if test -z "$prev"; then
+ case $p in
+ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
+ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
+ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
+ esac
+ fi
+ case $p in
+ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
+ esac
+ if test "$pre_test_object_deps_done" = no; then
+ case ${prev} in
+ -L | -R)
+ # Internal compiler library paths should come after those
+ # provided by the user.  The postdeps already come after the
+ # user-supplied libs so there is no need to process them.
+ if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
+ _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}"
+ else
+ _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}"
+ fi
+ ;;
+ # The "-l" case would never come before the object being
+ # linked, so don't bother handling this case.
+ esac
+ else
+ if test -z "$_LT_TAGVAR(postdeps, $1)"; then
+ _LT_TAGVAR(postdeps, $1)="${prev}${p}"
+ else
+ _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}"
+ fi
+ fi
+ prev=
+ ;;
+
+ *.lto.$objext) ;; # Ignore GCC LTO objects
+ *.$objext)
+ # This assumes that the test object file only shows up
+ # once in the compiler output.
+ if test "$p" = "conftest.$objext"; then
+ pre_test_object_deps_done=yes
+ continue
+ fi
+
+ if test "$pre_test_object_deps_done" = no; then
+ if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
+ _LT_TAGVAR(predep_objects, $1)="$p"
+ else
+ _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
+ fi
+ else
+ if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
+ _LT_TAGVAR(postdep_objects, $1)="$p"
+ else
+ _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
+ fi
+ fi
+ ;;
+
+ *) ;; # Ignore the rest.
+
+ esac
+ done
+
+ # Clean up.
+ rm -f a.out a.exe
+else
+ echo "libtool.m4: error: problem compiling $1 test program"
+fi
+
+$RM -f conftest.$objext
+CFLAGS=$_lt_libdeps_save_CFLAGS
+
+# PORTME: override above test on systems where it is broken
+m4_if([$1], [CXX],
+[case $host_os in
+interix[[3-9]]*)
+ # Interix 3.5 installs completely hosed .la files for C++, so rather than
+ # hack all around it, let's just trust "g++" to DTRT.
+ _LT_TAGVAR(predep_objects,$1)=
+ _LT_TAGVAR(postdep_objects,$1)=
+ _LT_TAGVAR(postdeps,$1)=
+ ;;
+
+linux*)
+ case `$CC -V 2>&1 | sed 5q` in
+ *Sun\ C*)
+ # Sun C++ 5.9
+
+ # The more standards-conforming stlport4 library is
+ # incompatible with the Cstd library. Avoid specifying
+ # it if it's in CXXFLAGS. Ignore libCrun as
+ # -library=stlport4 depends on it.
+ case " $CXX $CXXFLAGS " in
+ *" -library=stlport4 "*)
+ solaris_use_stlport4=yes
+ ;;
+ esac
+
+ if test "$solaris_use_stlport4" != yes; then
+ _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+ fi
+ ;;
+ esac
+ ;;
+
+solaris*)
+ case $cc_basename in
+ CC* | sunCC*)
+ # The more standards-conforming stlport4 library is
+ # incompatible with the Cstd library. Avoid specifying
+ # it if it's in CXXFLAGS. Ignore libCrun as
+ # -library=stlport4 depends on it.
+ case " $CXX $CXXFLAGS " in
+ *" -library=stlport4 "*)
+ solaris_use_stlport4=yes
+ ;;
+ esac
+
+ # Adding this requires a known-good setup of shared libraries for
+ # Sun compiler versions before 5.6, else PIC objects from an old
+ # archive will be linked into the output, leading to subtle bugs.
+ if test "$solaris_use_stlport4" != yes; then
+ _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+ fi
+ ;;
+ esac
+ ;;
+esac
+])
+
+case " $_LT_TAGVAR(postdeps, $1) " in
+*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
+esac
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=
+if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
+fi
+_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
+ [The directories searched by this compiler when creating a shared library])
+_LT_TAGDECL([], [predep_objects], [1],
+ [Dependencies to place before and after the objects being linked to
+ create a shared library])
+_LT_TAGDECL([], [postdep_objects], [1])
+_LT_TAGDECL([], [predeps], [1])
+_LT_TAGDECL([], [postdeps], [1])
+_LT_TAGDECL([], [compiler_lib_search_path], [1],
+ [The library search path used internally by the compiler when linking
+ a shared library])
+])# _LT_SYS_HIDDEN_LIBDEPS
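+
+# Illustrative sketch (not part of upstream libtool): for a hypothetical C++
+# link whose verbose output is
+#   /usr/lib/crtbegin.o -L/usr/lib conftest.o -lstdc++ /usr/lib/crtend.o
+# the loop above records crtbegin.o in predep_objects, -L/usr/lib in
+# compiler_lib_search_path (it precedes the conftest.o sentinel), and
+# -lstdc++ and crtend.o in postdeps and postdep_objects respectively.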
+
+
+# _LT_LANG_F77_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a Fortran 77 compiler are
+# suitably defined. These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_F77_CONFIG],
+[AC_LANG_PUSH(Fortran 77)
+if test -z "$F77" || test "X$F77" = "Xno"; then
+ _lt_disable_F77=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for f77 test sources.
+ac_ext=f
+
+# Object file extension for compiled f77 test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the F77 compiler isn't working. Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_F77" != yes; then
+ # Code to be used in simple compile tests
+ lt_simple_compile_test_code="\
+ subroutine t
+ return
+ end
+"
+
+ # Code to be used in simple link tests
+ lt_simple_link_test_code="\
+ program t
+ end
+"
+
+ # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+ _LT_TAG_COMPILER
+
+ # save warnings/boilerplate of simple test code
+ _LT_COMPILER_BOILERPLATE
+ _LT_LINKER_BOILERPLATE
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC="$CC"
+ lt_save_GCC=$GCC
+ lt_save_CFLAGS=$CFLAGS
+ CC=${F77-"f77"}
+ CFLAGS=$FFLAGS
+ compiler=$CC
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_CC_BASENAME([$compiler])
+ GCC=$G77
+ if test -n "$compiler"; then
+ AC_MSG_CHECKING([if libtool supports shared libraries])
+ AC_MSG_RESULT([$can_build_shared])
+
+ AC_MSG_CHECKING([whether to build shared libraries])
+ test "$can_build_shared" = "no" && enable_shared=no
+
+ # On AIX, shared libraries and static libraries use the same namespace, and
+ # are all built from PIC.
+ case $host_os in
+ aix3*)
+ test "$enable_shared" = yes && enable_static=no
+ if test -n "$RANLIB"; then
+ archive_cmds="$archive_cmds~\$RANLIB \$lib"
+ postinstall_cmds='$RANLIB $lib'
+ fi
+ ;;
+ aix[[4-9]]*)
+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+ test "$enable_shared" = yes && enable_static=no
+ fi
+ ;;
+ esac
+ AC_MSG_RESULT([$enable_shared])
+
+ AC_MSG_CHECKING([whether to build static libraries])
+ # Make sure either enable_shared or enable_static is yes.
+ test "$enable_shared" = yes || enable_static=yes
+ AC_MSG_RESULT([$enable_static])
+
+ _LT_TAGVAR(GCC, $1)="$G77"
+ _LT_TAGVAR(LD, $1)="$LD"
+
+ ## CAVEAT EMPTOR:
+ ## There is no encapsulation within the following macros, do not change
+ ## the running order or otherwise move them around unless you know exactly
+ ## what you are doing...
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_SYS_DYNAMIC_LINKER($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+
+ _LT_CONFIG($1)
+ fi # test -n "$compiler"
+
+ GCC=$lt_save_GCC
+ CC="$lt_save_CC"
+ CFLAGS="$lt_save_CFLAGS"
+fi # test "$_lt_disable_F77" != yes
+
+AC_LANG_POP
+])# _LT_LANG_F77_CONFIG
+
+
+# _LT_LANG_FC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for a Fortran compiler are
+# suitably defined. These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_FC_CONFIG],
+[AC_LANG_PUSH(Fortran)
+
+if test -z "$FC" || test "X$FC" = "Xno"; then
+ _lt_disable_FC=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for fc test sources.
+ac_ext=${ac_fc_srcext-f}
+
+# Object file extension for compiled fc test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the FC compiler isn't working. Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_FC" != yes; then
+ # Code to be used in simple compile tests
+ lt_simple_compile_test_code="\
+ subroutine t
+ return
+ end
+"
+
+ # Code to be used in simple link tests
+ lt_simple_link_test_code="\
+ program t
+ end
+"
+
+ # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+ _LT_TAG_COMPILER
+
+ # save warnings/boilerplate of simple test code
+ _LT_COMPILER_BOILERPLATE
+ _LT_LINKER_BOILERPLATE
+
+ # Allow CC to be a program name with arguments.
+ lt_save_CC="$CC"
+ lt_save_GCC=$GCC
+ lt_save_CFLAGS=$CFLAGS
+ CC=${FC-"f95"}
+ CFLAGS=$FCFLAGS
+ compiler=$CC
+ GCC=$ac_cv_fc_compiler_gnu
+
+ _LT_TAGVAR(compiler, $1)=$CC
+ _LT_CC_BASENAME([$compiler])
+
+ if test -n "$compiler"; then
+ AC_MSG_CHECKING([if libtool supports shared libraries])
+ AC_MSG_RESULT([$can_build_shared])
+
+ AC_MSG_CHECKING([whether to build shared libraries])
+ test "$can_build_shared" = "no" && enable_shared=no
+
+ # On AIX, shared libraries and static libraries use the same namespace, and
+ # are all built from PIC.
+ case $host_os in
+ aix3*)
+ test "$enable_shared" = yes && enable_static=no
+ if test -n "$RANLIB"; then
+ archive_cmds="$archive_cmds~\$RANLIB \$lib"
+ postinstall_cmds='$RANLIB $lib'
+ fi
+ ;;
+ aix[[4-9]]*)
+ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+ test "$enable_shared" = yes && enable_static=no
+ fi
+ ;;
+ esac
+ AC_MSG_RESULT([$enable_shared])
+
+ AC_MSG_CHECKING([whether to build static libraries])
+ # Make sure either enable_shared or enable_static is yes.
+ test "$enable_shared" = yes || enable_static=yes
+ AC_MSG_RESULT([$enable_static])
+
+ _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu"
+ _LT_TAGVAR(LD, $1)="$LD"
+
+ ## CAVEAT EMPTOR:
+ ## There is no encapsulation within the following macros, do not change
+ ## the running order or otherwise move them around unless you know exactly
+ ## what you are doing...
+ _LT_SYS_HIDDEN_LIBDEPS($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_SYS_DYNAMIC_LINKER($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+
+ _LT_CONFIG($1)
+ fi # test -n "$compiler"
+
+ GCC=$lt_save_GCC
+ CC=$lt_save_CC
+ CFLAGS=$lt_save_CFLAGS
+fi # test "$_lt_disable_FC" != yes
+
+AC_LANG_POP
+])# _LT_LANG_FC_CONFIG
+
+
+# _LT_LANG_GCJ_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Java Compiler
+# are suitably defined. These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_GCJ_CONFIG],
+[AC_REQUIRE([LT_PROG_GCJ])dnl
+AC_LANG_SAVE
+
+# Source file extension for Java test sources.
+ac_ext=java
+
+# Object file extension for compiled Java test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="class foo {}"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GCJ-"gcj"}
+CFLAGS=$GCJFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)="$LD"
+_LT_CC_BASENAME([$compiler])
+
+# GCJ did not exist at the time when GCC did not implicitly link libc in.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+ _LT_COMPILER_NO_RTTI($1)
+ _LT_COMPILER_PIC($1)
+ _LT_COMPILER_C_O($1)
+ _LT_COMPILER_FILE_LOCKS($1)
+ _LT_LINKER_SHLIBS($1)
+ _LT_LINKER_HARDCODE_LIBPATH($1)
+
+ _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GCJ_CONFIG
+
+
+# _LT_LANG_RC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for the Windows resource compiler
+# are suitably defined. These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_RC_CONFIG],
+[AC_REQUIRE([LT_PROG_RC])dnl
+AC_LANG_SAVE
+
+# Source file extension for RC test sources.
+ac_ext=rc
+
+# Object file extension for compiled RC test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'
+
+# Code to be used in simple link tests
+lt_simple_link_test_code="$lt_simple_compile_test_code"
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC="$CC"
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=
+CC=${RC-"windres"}
+CFLAGS=
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_CC_BASENAME([$compiler])
+_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+
+if test -n "$compiler"; then
+ :
+ _LT_CONFIG($1)
+fi
+
+GCC=$lt_save_GCC
+AC_LANG_RESTORE
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_RC_CONFIG
+
+
+# LT_PROG_GCJ
+# -----------
+AC_DEFUN([LT_PROG_GCJ],
+[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
+ [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
+ [AC_CHECK_TOOL(GCJ, gcj,)
+ test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2"
+ AC_SUBST(GCJFLAGS)])])[]dnl
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_GCJ], [])
+
+
+# LT_PROG_RC
+# ----------
+AC_DEFUN([LT_PROG_RC],
+[AC_CHECK_TOOL(RC, windres,)
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_RC], [])
+
+
+# _LT_DECL_EGREP
+# --------------
+# If we don't have a new enough Autoconf to choose the best grep
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_EGREP],
+[AC_REQUIRE([AC_PROG_EGREP])dnl
+AC_REQUIRE([AC_PROG_FGREP])dnl
+test -z "$GREP" && GREP=grep
+_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
+_LT_DECL([], [EGREP], [1], [An ERE matcher])
+_LT_DECL([], [FGREP], [1], [A literal string matcher])
+dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
+AC_SUBST([GREP])
+])
+
+
+# _LT_DECL_OBJDUMP
+# ----------------
+# If we don't have a new enough Autoconf to choose the best objdump
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_OBJDUMP],
+[AC_CHECK_TOOL(OBJDUMP, objdump, false)
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
+AC_SUBST([OBJDUMP])
+])
+
+# _LT_DECL_DLLTOOL
+# ----------------
+# Ensure DLLTOOL variable is set.
+m4_defun([_LT_DECL_DLLTOOL],
+[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
+AC_SUBST([DLLTOOL])
+])
+
+# _LT_DECL_SED
+# ------------
+# Check for a fully-functional sed program, one that truncates
+# as few characters as possible. Prefer GNU sed if found.
+m4_defun([_LT_DECL_SED],
+[AC_PROG_SED
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
+_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
+_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
+ [Sed that helps us avoid accidentally triggering echo(1) options like -n])
+])# _LT_DECL_SED
+
+m4_ifndef([AC_PROG_SED], [
+############################################################
+# NOTE: This macro has been submitted for inclusion into #
+# GNU Autoconf as AC_PROG_SED. When it is available in #
+# a released version of Autoconf we should remove this #
+# macro and use it instead. #
+############################################################
+
+m4_defun([AC_PROG_SED],
+[AC_MSG_CHECKING([for a sed that does not truncate output])
+AC_CACHE_VAL(lt_cv_path_SED,
+[# Loop through the user's path and test for sed and gsed.
+# Then use that list of sed's as ones to test for truncation.
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for lt_ac_prog in sed gsed; do
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
+ lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
+ fi
+ done
+ done
+done
+IFS=$as_save_IFS
+lt_ac_max=0
+lt_ac_count=0
+# Add /usr/xpg4/bin/sed as it is typically found on Solaris
+# along with /bin/sed that truncates output.
+for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
+ test ! -f $lt_ac_sed && continue
+ cat /dev/null > conftest.in
+ lt_ac_count=0
+ echo $ECHO_N "0123456789$ECHO_C" >conftest.in
+ # Check for GNU sed and select it if it is found.
+ if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
+ lt_cv_path_SED=$lt_ac_sed
+ break
+ fi
+ while true; do
+ cat conftest.in conftest.in >conftest.tmp
+ mv conftest.tmp conftest.in
+ cp conftest.in conftest.nl
+ echo >>conftest.nl
+ $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
+ cmp -s conftest.out conftest.nl || break
+ # 10000 chars as input seems more than enough
+ test $lt_ac_count -gt 10 && break
+ lt_ac_count=`expr $lt_ac_count + 1`
+ if test $lt_ac_count -gt $lt_ac_max; then
+ lt_ac_max=$lt_ac_count
+ lt_cv_path_SED=$lt_ac_sed
+ fi
+ done
+done
+])
+SED=$lt_cv_path_SED
+AC_SUBST([SED])
+AC_MSG_RESULT([$SED])
+])#AC_PROG_SED
+])#m4_ifndef
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_SED], [])
+
+
+# _LT_CHECK_SHELL_FEATURES
+# ------------------------
+# Find out whether the shell is Bourne or XSI compatible,
+# or has some other useful features.
+m4_defun([_LT_CHECK_SHELL_FEATURES],
+[AC_MSG_CHECKING([whether the shell understands some XSI constructs])
+# Try some XSI features
+xsi_shell=no
+( _lt_dummy="a/b/c"
+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
+ = c,a/b,b/c, \
+ && eval 'test $(( 1 + 1 )) -eq 2 \
+ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+ && xsi_shell=yes
+AC_MSG_RESULT([$xsi_shell])
+_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell'])
+
+AC_MSG_CHECKING([whether the shell understands "+="])
+lt_shell_append=no
+( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \
+ >/dev/null 2>&1 \
+ && lt_shell_append=yes
+AC_MSG_RESULT([$lt_shell_append])
+_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append'])
+
+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+ lt_unset=unset
+else
+ lt_unset=false
+fi
+_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl
+
+# test EBCDIC or ASCII
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+ # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+ lt_SP2NL='tr \040 \012'
+ lt_NL2SP='tr \015\012 \040\040'
+ ;;
+ *) # EBCDIC based system
+ lt_SP2NL='tr \100 \n'
+ lt_NL2SP='tr \r\n \100\100'
+ ;;
+esac
+_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
+_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
+])# _LT_CHECK_SHELL_FEATURES
+
+
+# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY)
+# ------------------------------------------------------
+# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and
+# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY.
+m4_defun([_LT_PROG_FUNCTION_REPLACE],
+[dnl {
+sed -e '/^$1 ()$/,/^} # $1 /c\
+$1 ()\
+{\
+m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1])
+} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \
+ && mv -f "$cfgfile.tmp" "$cfgfile" \
+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+test 0 -eq $? || _lt_function_replace_fail=:
+])
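+
+# Illustrative sketch (not part of upstream libtool): a call such as the
+# func_basename replacement below,
+#   _LT_PROG_FUNCTION_REPLACE([func_basename], [func_basename_result="${1##*/}"])
+# rewrites the region of $cfgfile between "func_basename ()" and
+# "} # func_basename " so that the portable sed-based body is replaced by
+# the single parameter expansion given as the second argument.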
+
+
+# _LT_PROG_REPLACE_SHELLFNS
+# -------------------------
+# Replace existing portable implementations of several shell functions with
+# equivalent extended shell implementations where those features are available.
+m4_defun([_LT_PROG_REPLACE_SHELLFNS],
+[if test x"$xsi_shell" = xyes; then
+ _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl
+ case ${1} in
+ */*) func_dirname_result="${1%/*}${2}" ;;
+ * ) func_dirname_result="${3}" ;;
+ esac])
+
+ _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl
+ func_basename_result="${1##*/}"])
+
+ _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl
+ case ${1} in
+ */*) func_dirname_result="${1%/*}${2}" ;;
+ * ) func_dirname_result="${3}" ;;
+ esac
+ func_basename_result="${1##*/}"])
+
+ _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl
+ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+ # positional parameters, so assign one to an ordinary parameter first.
+ func_stripname_result=${3}
+ func_stripname_result=${func_stripname_result#"${1}"}
+ func_stripname_result=${func_stripname_result%"${2}"}])
+
+ _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl
+ func_split_long_opt_name=${1%%=*}
+ func_split_long_opt_arg=${1#*=}])
+
+ _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl
+ func_split_short_opt_arg=${1#??}
+ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}])
+
+ _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl
+ case ${1} in
+ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+ *) func_lo2o_result=${1} ;;
+ esac])
+
+ _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo])
+
+ _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))])
+
+ _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}])
+fi
+
+if test x"$lt_shell_append" = xyes; then
+ _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"])
+
+ _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl
+ func_quote_for_eval "${2}"
+dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \
+ eval "${1}+=\\\\ \\$func_quote_for_eval_result"])
+
+ # Save a `func_append' function call where possible by direct use of '+='
+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
+ && mv -f "$cfgfile.tmp" "$cfgfile" \
+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+ test 0 -eq $? || _lt_function_replace_fail=:
+else
+ # Save a `func_append' function call even when '+=' is not available
+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
+ && mv -f "$cfgfile.tmp" "$cfgfile" \
+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
+ test 0 -eq $? || _lt_function_replace_fail=:
+fi
+
+if test x"$_lt_function_replace_fail" = x":"; then
+ AC_MSG_WARN([Unable to substitute extended shell functions in $ofile])
+fi
+])
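+
+# Illustrative sketch (not part of upstream libtool): when lt_shell_append=yes,
+# the func_append sed substitution above turns a call such as
+#   func_append compile_command " $arg"
+# in $cfgfile into the direct append
+#   compile_command+=" $arg"
+# while the fallback branch rewrites it as compile_command="$compile_command $arg".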
+
+# _LT_PATH_CONVERSION_FUNCTIONS
+# -----------------------------
+# Determine which file name conversion functions should be used by
+# func_to_host_file (and, implicitly, by func_to_host_path). These are needed
+# for certain cross-compile configurations and native mingw.
+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_MSG_CHECKING([how to convert $build file names to $host format])
+AC_CACHE_VAL(lt_cv_to_host_file_cmd,
+[case $host in
+ *-*-mingw* )
+ case $build in
+ *-*-mingw* ) # actually msys
+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
+ ;;
+ *-*-cygwin* )
+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+ ;;
+ * ) # otherwise, assume *nix
+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
+ ;;
+ esac
+ ;;
+ *-*-cygwin* )
+ case $build in
+ *-*-mingw* ) # actually msys
+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
+ ;;
+ *-*-cygwin* )
+ lt_cv_to_host_file_cmd=func_convert_file_noop
+ ;;
+ * ) # otherwise, assume *nix
+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
+ ;;
+ esac
+ ;;
+ * ) # unhandled hosts (and "normal" native builds)
+ lt_cv_to_host_file_cmd=func_convert_file_noop
+ ;;
+esac
+])
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
+_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
+ [0], [convert $build file names to $host format])dnl
+
+AC_MSG_CHECKING([how to convert $build file names to toolchain format])
+AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
+[# assume ordinary cross tools, or native build.
+lt_cv_to_tool_file_cmd=func_convert_file_noop
+case $host in
+ *-*-mingw* )
+ case $build in
+ *-*-mingw* ) # actually msys
+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
+ ;;
+ esac
+ ;;
+esac
+])
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
+_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
+ [0], [convert $build files to toolchain format])dnl
+])# _LT_PATH_CONVERSION_FUNCTIONS
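+
+# Illustrative note (not part of upstream libtool): on a Cygwin build machine
+# targeting a mingw host, the test above selects func_convert_file_cygwin_to_w32,
+# so a POSIX-style path such as /cygdrive/c/src reaches the toolchain as a
+# Windows-style path (e.g. C:/src); plain native builds fall through to
+# func_convert_file_noop, which leaves file names untouched.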
diff --git a/build/ltmain.sh b/build/ltmain.sh
new file mode 100755
index 0000000..3061e3c
--- /dev/null
+++ b/build/ltmain.sh
@@ -0,0 +1,9636 @@
+
+# libtool (GNU libtool) 2.4
+# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions. There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING. If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]... [MODE-ARG]...
+#
+# Provide generalized library-building support services.
+#
+# --config show all configuration variables
+# --debug enable verbose shell tracing
+# -n, --dry-run display commands without modifying any files
+# --features display basic configuration information and exit
+# --mode=MODE use operation mode MODE
+# --preserve-dup-deps don't remove duplicate dependency libraries
+# --quiet, --silent don't print informational messages
+# --no-quiet, --no-silent
+# print informational messages (default)
+# --tag=TAG use configuration variables from tag TAG
+# -v, --verbose print more informational messages than default
+# --no-verbose don't print the extra informational messages
+# --version print version information
+# -h, --help, --help-all print short, long, or detailed help message
+#
+# MODE must be one of the following:
+#
+# clean remove files from the build directory
+# compile compile a source file into a libtool object
+# execute automatically set library path, then run a program
+# finish complete the installation of libtool libraries
+# install install libraries or executables
+# link create a library or an executable
+# uninstall remove libraries from an installed directory
+#
+# MODE-ARGS vary depending on the MODE. When passed as first option,
+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that.
+# Try `$progname --help --mode=MODE' for a more detailed description of MODE.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+# host-triplet: $host
+# shell: $SHELL
+# compiler: $LTCC
+# compiler flags: $LTCFLAGS
+# linker: $LD (gnu? $with_gnu_ld)
+# $progname: (GNU libtool) 2.4
+# automake: $automake_version
+# autoconf: $autoconf_version
+#
+# Report bugs to <bug-libtool@gnu.org>.
+# GNU libtool home page: <http://www.gnu.org/software/libtool/>.
+# General help using GNU software: <http://www.gnu.org/gethelp/>.
+
+PROGRAM=libtool
+PACKAGE=libtool
+VERSION=2.4
+TIMESTAMP=""
+package_revision=1.3293
+
+# Be Bourne compatible
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+ emulate sh
+ NULLCMD=:
+ # Zsh 3.x and 4.x perform word splitting on ${1+"$@"}, which
+ # is contrary to our usage. Disable this feature.
+ alias -g '${1+"$@"}'='"$@"'
+ setopt NO_GLOB_SUBST
+else
+ case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+ eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+}
+
+# NLS nuisances: We save the old values to restore during execute mode.
+lt_user_locale=
+lt_safe_locale=
+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+do
+ eval "if test \"\${$lt_var+set}\" = set; then
+ save_$lt_var=\$$lt_var
+ $lt_var=C
+ export $lt_var
+ lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\"
+ lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
+ fi"
+done
+LC_ALL=C
+LANGUAGE=C
+export LANGUAGE LC_ALL
+
+$lt_unset CDPATH
+
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${EGREP="grep -E"}
+: ${FGREP="grep -F"}
+: ${GREP="grep"}
+: ${LN_S="ln -s"}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SED="sed"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" $lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE. If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+ func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+ if test "X$func_dirname_result" = "X${1}"; then
+ func_dirname_result="${3}"
+ else
+ func_dirname_result="$func_dirname_result${2}"
+ fi
+} # func_dirname may be replaced by extended shell implementation
+
+
+# func_basename file
+func_basename ()
+{
+ func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+} # func_basename may be replaced by extended shell implementation
+
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+# dirname: Compute the dirname of FILE. If nonempty,
+# add APPEND to the result, otherwise set result
+# to NONDIR_REPLACEMENT.
+# value returned in "$func_dirname_result"
+# basename: Compute filename of FILE.
+# value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+ # Extract subdirectory from the argument.
+ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+ if test "X$func_dirname_result" = "X${1}"; then
+ func_dirname_result="${3}"
+ else
+ func_dirname_result="$func_dirname_result${2}"
+ fi
+ func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+} # func_dirname_and_basename may be replaced by extended shell implementation
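+
+# Illustrative example (not part of upstream libtool):
+#   func_dirname_and_basename "/usr/lib/libltdl.la" '' '.'
+# leaves "/usr/lib" in $func_dirname_result and "libltdl.la" in
+# $func_basename_result.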
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+ case ${2} in
+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+ esac
+} # func_stripname may be replaced by extended shell implementation
+
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+ s@/\./@/@g
+ t dotsl
+ s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+# value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+ # Start from root dir and reassemble the path.
+ func_normal_abspath_result=
+ func_normal_abspath_tpath=$1
+ func_normal_abspath_altnamespace=
+ case $func_normal_abspath_tpath in
+ "")
+ # Empty path, that just means $cwd.
+ func_stripname '' '/' "`pwd`"
+ func_normal_abspath_result=$func_stripname_result
+ return
+ ;;
+ # The next three entries are used to spot a run of precisely
+ # two leading slashes without using negated character classes;
+ # we take advantage of case's first-match behaviour.
+ ///*)
+ # Unusual form of absolute path, do nothing.
+ ;;
+ //*)
+ # Not necessarily an ordinary path; POSIX reserves leading '//'
+ # and for example Cygwin uses it to access remote file shares
+ # over CIFS/SMB, so we conserve a leading double slash if found.
+ func_normal_abspath_altnamespace=/
+ ;;
+ /*)
+ # Absolute path, do nothing.
+ ;;
+ *)
+ # Relative path, prepend $cwd.
+ func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+ ;;
+ esac
+ # Cancel out all the simple stuff to save iterations. We also want
+ # the path to end with a slash for ease of parsing, so make sure
+ # there is one (and only one) here.
+ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+ -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+ while :; do
+ # Processed it all yet?
+ if test "$func_normal_abspath_tpath" = / ; then
+ # If we ascended to the root using ".." the result may be empty now.
+ if test -z "$func_normal_abspath_result" ; then
+ func_normal_abspath_result=/
+ fi
+ break
+ fi
+ func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+ -e "$pathcar"`
+ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+ -e "$pathcdr"`
+ # Figure out what to do with it
+ case $func_normal_abspath_tcomponent in
+ "")
+ # Trailing empty path component, ignore it.
+ ;;
+ ..)
+ # Parent dir; strip last assembled component from result.
+ func_dirname "$func_normal_abspath_result"
+ func_normal_abspath_result=$func_dirname_result
+ ;;
+ *)
+ # Actual path component, append it.
+ func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+ ;;
+ esac
+ done
+ # Restore leading double-slash if one was found on entry.
+ func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
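+
+# Illustrative example (not part of upstream libtool): invoked from
+# /home/user, a call such as
+#   func_normal_abspath "src/.//lib/../ltdl/"
+# leaves "/home/user/src/ltdl" in $func_normal_abspath_result.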
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+# value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+ func_relative_path_result=
+ func_normal_abspath "$1"
+ func_relative_path_tlibdir=$func_normal_abspath_result
+ func_normal_abspath "$2"
+ func_relative_path_tbindir=$func_normal_abspath_result
+
+ # Ascend the tree starting from libdir
+ while :; do
+ # check if we have found a prefix of bindir
+ case $func_relative_path_tbindir in
+ $func_relative_path_tlibdir)
+ # found an exact match
+ func_relative_path_tcancelled=
+ break
+ ;;
+ $func_relative_path_tlibdir*)
+ # found a matching prefix
+ func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+ func_relative_path_tcancelled=$func_stripname_result
+ if test -z "$func_relative_path_result"; then
+ func_relative_path_result=.
+ fi
+ break
+ ;;
+ *)
+ func_dirname $func_relative_path_tlibdir
+ func_relative_path_tlibdir=${func_dirname_result}
+ if test "x$func_relative_path_tlibdir" = x ; then
+ # Have to descend all the way to the root!
+ func_relative_path_result=../$func_relative_path_result
+ func_relative_path_tcancelled=$func_relative_path_tbindir
+ break
+ fi
+ func_relative_path_result=../$func_relative_path_result
+ ;;
+ esac
+ done
+
+ # Now calculate path; take care to avoid doubling-up slashes.
+ func_stripname '' '/' "$func_relative_path_result"
+ func_relative_path_result=$func_stripname_result
+ func_stripname '/' '/' "$func_relative_path_tcancelled"
+ if test "x$func_stripname_result" != x ; then
+ func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+ fi
+
+ # Normalisation. If bindir is libdir, return empty string,
+ # else relative path ending with a slash; either way, target
+ # file name can be directly appended.
+ if test ! -z "$func_relative_path_result"; then
+ func_stripname './' '' "$func_relative_path_result/"
+ func_relative_path_result=$func_stripname_result
+ fi
+}
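+
+# Illustrative example (not part of upstream libtool):
+#   func_relative_path "/usr/local/lib" "/usr/local/share/libtool"
+# leaves "../share/libtool/" in $func_relative_path_result, ready to have a
+# file name appended without a separator.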
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+ [\\/]*|[A-Za-z]:\\*) ;;
+ *[\\/]*)
+ progdir=$func_dirname_result
+ progdir=`cd "$progdir" && pwd`
+ progpath="$progdir/$progname"
+ ;;
+ *)
+ save_IFS="$IFS"
+ IFS=:
+ for progdir in $PATH; do
+ IFS="$save_IFS"
+ test -x "$progdir/$progname" && break
+ done
+ IFS="$save_IFS"
+ test -n "$progdir" || progdir=`pwd`
+ progpath="$progdir/$progname"
+ ;;
+esac
+
+# Sed substitution that helps us do robust quoting. It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
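+# For example (assuming a POSIX $SED), piping the string  say "$x"  through
+# double_quote_subst yields  say \"$x\"  -- the quotes are escaped but the
+# variable reference is left expandable, unlike with sed_quote_subst.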
+
+# Sed substitution that turns a string into a regex matching for the
+# string literally.
+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g'
+
+# Sed substitution that converts a w32 file name or path
+# which contains forward slashes, into one that contains
+# (escaped) backslashes. A very naive implementation.
+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+
+# Re-`\' parameter expansions in output of double_quote_subst that were
+# `\'-ed in input to the same. If an odd number of `\' preceded a '$'
+# in input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$'; backslash-escape that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+ s/$bs4/&\\
+/g
+ s/^$bs2$dollar/$bs&/
+ s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+ s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if one has been set.
+func_echo ()
+{
+ $ECHO "$progname: ${opt_mode+$opt_mode: }$*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+ $opt_verbose && func_echo ${1+"$@"}
+
+ # A bug in bash halts the script if the last line of a function
+ # fails when set -e is in force, so we need another command to
+ # work around that:
+ :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+ $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+ $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+ $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2
+
+ # bash bug again:
+ :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+ func_error ${1+"$@"}
+ exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+ func_error ${1+"$@"}
+ func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information." ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+ $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+ my_directory_path="$1"
+ my_dir_list=
+
+ if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+ # Protect directory names starting with `-'
+ case $my_directory_path in
+ -*) my_directory_path="./$my_directory_path" ;;
+ esac
+
+ # While some portion of DIR does not yet exist...
+ while test ! -d "$my_directory_path"; do
+ # ...make a list in topmost first order. Use a colon delimited
+ # list in case some portion of the path contains whitespace.
+ my_dir_list="$my_directory_path:$my_dir_list"
+
+ # If the last portion added has no slash in it, the list is done
+ case $my_directory_path in */*) ;; *) break ;; esac
+
+ # ...otherwise throw away the child directory and loop
+ my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+ done
+ my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+ save_mkdir_p_IFS="$IFS"; IFS=':'
+ for my_dir in $my_dir_list; do
+ IFS="$save_mkdir_p_IFS"
+ # mkdir can fail with a `File exists' error if two processes
+ # try to create one of the directories concurrently. Don't
+ # stop in that case!
+ $MKDIR "$my_dir" 2>/dev/null || :
+ done
+ IFS="$save_mkdir_p_IFS"
+
+ # Bail out if we (or some other process) failed to create a directory.
+ test -d "$my_directory_path" || \
+ func_fatal_error "Failed to create \`$1'"
+ fi
+}
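+# Illustrative use (hypothetical path; requires $MKDIR, $SED and $dirname to
+# be set up as they are earlier in this script):
+#   func_mkdir_p "sub/dir/.libs"   # creates sub, sub/dir and sub/dir/.libs
+#                                  # as needed, tolerating concurrent mkdirs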
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible. If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+ my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+ if test "$opt_dry_run" = ":"; then
+ # Return a directory name, but don't create it in dry-run mode
+ my_tmpdir="${my_template}-$$"
+ else
+
+ # If mktemp works, use that first and foremost
+ my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+ if test ! -d "$my_tmpdir"; then
+ # Failing that, at least try and use $RANDOM to avoid a race
+ my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+ save_mktempdir_umask=`umask`
+ umask 0077
+ $MKDIR "$my_tmpdir"
+ umask $save_mktempdir_umask
+ fi
+
+ # If we're not in dry-run mode, bomb out on failure
+ test -d "$my_tmpdir" || \
+ func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+ fi
+
+ $ECHO "$my_tmpdir"
+}
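+# Illustrative use (the exact name is system dependent):
+#   my_tmp=`func_mktempdir`   # e.g. /tmp/libtool-XXXXXXXX via mktemp, or a
+#                             # ${RANDOM}$$-based fallback directory name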
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+ case $1 in
+ *[\\\`\"\$]*)
+ func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+ *)
+ func_quote_for_eval_unquoted_result="$1" ;;
+ esac
+
+ case $func_quote_for_eval_unquoted_result in
+ # Double-quote args containing shell metacharacters to delay
+ # word splitting, command substitution and variable
+ # expansion for a subsequent eval.
+ # Many Bourne shells cannot handle close brackets correctly
+ # in scan sets, so we specify it separately.
+ *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
+ func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+ ;;
+ *)
+ func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+ esac
+}
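+# Illustrative example (assumes sed_quote_subst as defined above):
+#   func_quote_for_eval 'a b$c'
+#   # $func_quote_for_eval_unquoted_result => a b\$c
+#   # $func_quote_for_eval_result          => "a b\$c"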
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+ case $1 in
+ *[\\\`\"]*)
+ my_arg=`$ECHO "$1" | $SED \
+ -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+ *)
+ my_arg="$1" ;;
+ esac
+
+ case $my_arg in
+ # Double-quote args containing shell metacharacters to delay
+ # word splitting and command substitution for a subsequent eval.
+ # Many Bourne shells cannot handle close brackets correctly
+ # in scan sets, so we specify it separately.
+ *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
+ my_arg="\"$my_arg\""
+ ;;
+ esac
+
+ func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, output CMD. Then, if opt_dry_run is
+# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+ my_cmd="$1"
+ my_fail_exp="${2-:}"
+
+ ${opt_silent-false} || {
+ func_quote_for_expand "$my_cmd"
+ eval "func_echo $func_quote_for_expand_result"
+ }
+
+ if ${opt_dry_run-false}; then :; else
+ eval "$my_cmd"
+ my_status=$?
+ if test "$my_status" -eq 0; then :; else
+ eval "(exit $my_status); $my_fail_exp"
+ fi
+ fi
+}
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, output CMD. Then, if opt_dry_run is
+# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it. Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+ my_cmd="$1"
+ my_fail_exp="${2-:}"
+
+ ${opt_silent-false} || {
+ func_quote_for_expand "$my_cmd"
+ eval "func_echo $func_quote_for_expand_result"
+ }
+
+ if ${opt_dry_run-false}; then :; else
+ eval "$lt_user_locale
+ $my_cmd"
+ my_status=$?
+ eval "$lt_safe_locale"
+ if test "$my_status" -eq 0; then :; else
+ eval "(exit $my_status); $my_fail_exp"
+ fi
+ fi
+}
+
+# func_tr_sh
+# Turn $1 into a string suitable for a shell variable name.
+# Result is stored in $func_tr_sh_result. All characters
+# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
+# if $1 begins with a digit, a '_' is prepended as well.
+func_tr_sh ()
+{
+ case $1 in
+ [0-9]* | *[!a-zA-Z0-9_]*)
+ func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'`
+ ;;
+ * )
+ func_tr_sh_result=$1
+ ;;
+ esac
+}
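+# Illustrative example:
+#   func_tr_sh "1foo-bar"
+#   # $func_tr_sh_result => _1foo_bar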
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+ $opt_debug
+
+ $SED -n '/(C)/!b go
+ :more
+ /\./!{
+ N
+ s/\n# / /
+ b more
+ }
+ :go
+ /^# '$PROGRAM' (GNU /,/# warranty; / {
+ s/^# //
+ s/^# *$//
+ s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+ p
+ }' < "$progpath"
+ exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+ $opt_debug
+
+ $SED -n '/^# Usage:/,/^# *.*--help/ {
+ s/^# //
+ s/^# *$//
+ s/\$progname/'$progname'/
+ p
+ }' < "$progpath"
+ echo
+ $ECHO "run \`$progname --help | more' for full usage"
+ exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+ $opt_debug
+
+ $SED -n '/^# Usage:/,/# Report bugs to/ {
+ :print
+ s/^# //
+ s/^# *$//
+ s*\$progname*'$progname'*
+ s*\$host*'"$host"'*
+ s*\$SHELL*'"$SHELL"'*
+ s*\$LTCC*'"$LTCC"'*
+ s*\$LTCFLAGS*'"$LTCFLAGS"'*
+ s*\$LD*'"$LD"'*
+ s/\$with_gnu_ld/'"$with_gnu_ld"'/
+ s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/
+ s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/
+ p
+ d
+ }
+ /^# .* home page:/b print
+ /^# General help using/b print
+ ' < "$progpath"
+ ret=$?
+ if test -z "$1"; then
+ exit $ret
+ fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+ $opt_debug
+
+ func_error "missing argument for $1."
+ exit_cmd=exit
+}
+
+
+# func_split_short_opt shortopt
+# Set func_split_short_opt_name and func_split_short_opt_arg shell
+# variables after splitting SHORTOPT after the 2nd character.
+func_split_short_opt ()
+{
+ my_sed_short_opt='1s/^\(..\).*$/\1/;q'
+ my_sed_short_rest='1s/^..\(.*\)$/\1/;q'
+
+ func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"`
+ func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"`
+} # func_split_short_opt may be replaced by extended shell implementation
+
+
+# func_split_long_opt longopt
+# Set func_split_long_opt_name and func_split_long_opt_arg shell
+# variables after splitting LONGOPT at the `=' sign.
+func_split_long_opt ()
+{
+ my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q'
+ my_sed_long_arg='1s/^--[^=]*=//'
+
+ func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"`
+ func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"`
+} # func_split_long_opt may be replaced by extended shell implementation
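+# Illustrative example:
+#   func_split_long_opt "--mode=compile"
+#   # $func_split_long_opt_name => --mode
+#   # $func_split_long_opt_arg  => compile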
+
+exit_cmd=:
+
+
+
+
+
+magic="%%%MAGIC variable%%%"
+magic_exe="%%%MAGIC EXE variable%%%"
+
+# Global variables.
+nonopt=
+preserve_args=
+lo2o="s/\\.lo\$/.${objext}/"
+o2lo="s/\\.${objext}\$/.lo/"
+extracted_archives=
+extracted_serial=0
+
+# If this variable is set in any of the actions, the command in it
+# will be execed at the end. This prevents here-documents from being
+# left over by shells.
+exec_cmd=
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+ eval "${1}=\$${1}\${2}"
+} # func_append may be replaced by extended shell implementation
+
+# func_append_quoted var value
+# Quote VALUE and append to the end of shell variable VAR, separated
+# by a space.
+func_append_quoted ()
+{
+ func_quote_for_eval "${2}"
+ eval "${1}=\$${1}\\ \$func_quote_for_eval_result"
+} # func_append_quoted may be replaced by extended shell implementation
+
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+ func_arith_result=`expr "${@}"`
+} # func_arith may be replaced by extended shell implementation
+
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+ func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len`
+} # func_len may be replaced by extended shell implementation
+
+
+# func_lo2o object
+func_lo2o ()
+{
+ func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+} # func_lo2o may be replaced by extended shell implementation
+
+
+# func_xform libobj-or-source
+func_xform ()
+{
+ func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+} # func_xform may be replaced by extended shell implementation
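+# Illustrative examples (assuming $objext is "o", as on most Unix hosts):
+#   func_lo2o "foo.lo"   # $func_lo2o_result  => foo.o
+#   func_xform "bar.c"   # $func_xform_result => bar.lo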
+
+
+# func_fatal_configuration arg...
+# Echo program name prefixed message to standard error, followed by
+# a configuration failure hint, and exit.
+func_fatal_configuration ()
+{
+ func_error ${1+"$@"}
+ func_error "See the $PACKAGE documentation for more information."
+ func_fatal_error "Fatal configuration error."
+}
+
+
+# func_config
+# Display the configuration for all the tags in this script.
+func_config ()
+{
+ re_begincf='^# ### BEGIN LIBTOOL'
+ re_endcf='^# ### END LIBTOOL'
+
+ # Default configuration.
+ $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
+
+ # Now print the configurations for the tags.
+ for tagname in $taglist; do
+ $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
+ done
+
+ exit $?
+}
+
+# func_features
+# Display the features supported by this script.
+func_features ()
+{
+ echo "host: $host"
+ if test "$build_libtool_libs" = yes; then
+ echo "enable shared libraries"
+ else
+ echo "disable shared libraries"
+ fi
+ if test "$build_old_libs" = yes; then
+ echo "enable static libraries"
+ else
+ echo "disable static libraries"
+ fi
+
+ exit $?
+}
+
+# func_enable_tag tagname
+# Verify that TAGNAME is valid, and either flag an error and exit, or
+# enable the TAGNAME tag. We also add TAGNAME to the global $taglist
+# variable here.
+func_enable_tag ()
+{
+ # Global variable:
+ tagname="$1"
+
+ re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
+ re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
+ sed_extractcf="/$re_begincf/,/$re_endcf/p"
+
+ # Validate tagname.
+ case $tagname in
+ *[!-_A-Za-z0-9,/]*)
+ func_fatal_error "invalid tag name: $tagname"
+ ;;
+ esac
+
+ # Don't test for the "default" C tag, as we know it's
+ # there but not specially marked.
+ case $tagname in
+ CC) ;;
+ *)
+ if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
+ taglist="$taglist $tagname"
+
+ # Evaluate the configuration. Be careful to quote the path
+ # and the sed script, to avoid splitting on whitespace, but
+ # also don't use non-portable quotes within backquotes within
+ # quotes; we have to do it in two steps:
+ extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
+ eval "$extractedcf"
+ else
+ func_error "ignoring unknown tag $tagname"
+ fi
+ ;;
+ esac
+}
+
+# func_check_version_match
+# Ensure that we are using the m4 macros and libtool script from the same
+# release of libtool.
+func_check_version_match ()
+{
+ if test "$package_revision" != "$macro_revision"; then
+ if test "$VERSION" != "$macro_version"; then
+ if test -z "$macro_version"; then
+ cat >&2 <<_LT_EOF
+$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from an older release.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+ else
+ cat >&2 <<_LT_EOF
+$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+ fi
+ else
+ cat >&2 <<_LT_EOF
+$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision,
+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+$progname: of $PACKAGE $VERSION and run autoconf again.
+_LT_EOF
+ fi
+
+ exit $EXIT_MISMATCH
+ fi
+}
+
+
+# Shorthand for --mode=foo, only valid as the first argument
+case $1 in
+clean|clea|cle|cl)
+ shift; set dummy --mode clean ${1+"$@"}; shift
+ ;;
+compile|compil|compi|comp|com|co|c)
+ shift; set dummy --mode compile ${1+"$@"}; shift
+ ;;
+execute|execut|execu|exec|exe|ex|e)
+ shift; set dummy --mode execute ${1+"$@"}; shift
+ ;;
+finish|finis|fini|fin|fi|f)
+ shift; set dummy --mode finish ${1+"$@"}; shift
+ ;;
+install|instal|insta|inst|ins|in|i)
+ shift; set dummy --mode install ${1+"$@"}; shift
+ ;;
+link|lin|li|l)
+ shift; set dummy --mode link ${1+"$@"}; shift
+ ;;
+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+ shift; set dummy --mode uninstall ${1+"$@"}; shift
+ ;;
+esac
+
+
+
+# Option defaults:
+opt_debug=:
+opt_dry_run=false
+opt_config=false
+opt_preserve_dup_deps=false
+opt_features=false
+opt_finish=false
+opt_help=false
+opt_help_all=false
+opt_silent=:
+opt_verbose=:
+opt_silent=false
+opt_verbose=false
+
+
+# Parse options once, thoroughly. This comes as soon as possible in the
+# script to make things like `--version' happen as quickly as we can.
+{
+ # this just eases exit handling
+ while test $# -gt 0; do
+ opt="$1"
+ shift
+ case $opt in
+ --debug|-x) opt_debug='set -x'
+ func_echo "enabling shell trace mode"
+ $opt_debug
+ ;;
+ --dry-run|--dryrun|-n)
+ opt_dry_run=:
+ ;;
+ --config)
+ opt_config=:
+func_config
+ ;;
+ --dlopen|-dlopen)
+ optarg="$1"
+ opt_dlopen="${opt_dlopen+$opt_dlopen
+}$optarg"
+ shift
+ ;;
+ --preserve-dup-deps)
+ opt_preserve_dup_deps=:
+ ;;
+ --features)
+ opt_features=:
+func_features
+ ;;
+ --finish)
+ opt_finish=:
+set dummy --mode finish ${1+"$@"}; shift
+ ;;
+ --help)
+ opt_help=:
+ ;;
+ --help-all)
+ opt_help_all=:
+opt_help=': help-all'
+ ;;
+ --mode)
+ test $# = 0 && func_missing_arg $opt && break
+ optarg="$1"
+ opt_mode="$optarg"
+case $optarg in
+ # Valid mode arguments:
+ clean|compile|execute|finish|install|link|relink|uninstall) ;;
+
+ # Catch anything else as an error
+ *) func_error "invalid argument for $opt"
+ exit_cmd=exit
+ break
+ ;;
+esac
+ shift
+ ;;
+ --no-silent|--no-quiet)
+ opt_silent=false
+func_append preserve_args " $opt"
+ ;;
+ --no-verbose)
+ opt_verbose=false
+func_append preserve_args " $opt"
+ ;;
+ --silent|--quiet)
+ opt_silent=:
+func_append preserve_args " $opt"
+ opt_verbose=false
+ ;;
+ --verbose|-v)
+ opt_verbose=:
+func_append preserve_args " $opt"
+opt_silent=false
+ ;;
+ --tag)
+ test $# = 0 && func_missing_arg $opt && break
+ optarg="$1"
+ opt_tag="$optarg"
+func_append preserve_args " $opt $optarg"
+func_enable_tag "$optarg"
+ shift
+ ;;
+
+ -\?|-h) func_usage ;;
+ --help) func_help ;;
+ --version) func_version ;;
+
+ # Separate optargs to long options:
+ --*=*)
+ func_split_long_opt "$opt"
+ set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"}
+ shift
+ ;;
+
+ # Separate non-argument short options:
+ -\?*|-h*|-n*|-v*)
+ func_split_short_opt "$opt"
+ set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"}
+ shift
+ ;;
+
+ --) break ;;
+ -*) func_fatal_help "unrecognized option \`$opt'" ;;
+ *) set dummy "$opt" ${1+"$@"}; shift; break ;;
+ esac
+ done
+
+ # Validate options:
+
+ # save first non-option argument
+ if test "$#" -gt 0; then
+ nonopt="$opt"
+ shift
+ fi
+
+ # preserve --debug
+ test "$opt_debug" = : || func_append preserve_args " --debug"
+
+ case $host in
+ *cygwin* | *mingw* | *pw32* | *cegcc*)
+ # don't eliminate duplications in $postdeps and $predeps
+ opt_duplicate_compiler_generated_deps=:
+ ;;
+ *)
+ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
+ ;;
+ esac
+
+ $opt_help || {
+ # Sanity checks first:
+ func_check_version_match
+
+ if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
+ func_fatal_configuration "not configured to build any kind of library"
+ fi
+
+ # Darwin sucks
+ eval std_shrext=\"$shrext_cmds\"
+
+ # Only execute mode is allowed to have -dlopen flags.
+ if test -n "$opt_dlopen" && test "$opt_mode" != execute; then
+ func_error "unrecognized option \`-dlopen'"
+ $ECHO "$help" 1>&2
+ exit $EXIT_FAILURE
+ fi
+
+ # Change the help message to a mode-specific one.
+ generic_help="$help"
+ help="Try \`$progname --help --mode=$opt_mode' for more information."
+ }
+
+
+ # Bail if the options were screwed
+ $exit_cmd $EXIT_FAILURE
+}
+
+
+
+
+## ----------- ##
+## Main. ##
+## ----------- ##
+
+# func_lalib_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_lalib_p ()
+{
+ test -f "$1" &&
+ $SED -e 4q "$1" 2>/dev/null \
+ | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
+}
+
+# func_lalib_unsafe_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function implements the same check as func_lalib_p without
+# resorting to external programs. To this end, it redirects stdin and
+# closes it afterwards, without saving the original file descriptor.
+# As a safety measure, use it only where a negative result would be
+# fatal anyway. Works if `file' does not exist.
+func_lalib_unsafe_p ()
+{
+ lalib_p=no
+ if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
+ for lalib_p_l in 1 2 3 4
+ do
+ read lalib_p_line
+ case "$lalib_p_line" in
+ \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
+ esac
+ done
+ exec 0<&5 5<&-
+ fi
+ test "$lalib_p" = yes
+}
+
+# func_ltwrapper_script_p file
+# True iff FILE is a libtool wrapper script
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_script_p ()
+{
+ func_lalib_p "$1"
+}
+
+# func_ltwrapper_executable_p file
+# True iff FILE is a libtool wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_executable_p ()
+{
+ func_ltwrapper_exec_suffix=
+ case $1 in
+ *.exe) ;;
+ *) func_ltwrapper_exec_suffix=.exe ;;
+ esac
+ $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
+}
+
+# func_ltwrapper_scriptname file
+# Assumes FILE is an ltwrapper_executable; uses $file to determine the
+# appropriate filename for a temporary ltwrapper_script.
+func_ltwrapper_scriptname ()
+{
+ func_dirname_and_basename "$1" "" "."
+ func_stripname '' '.exe' "$func_basename_result"
+ func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+}
+
+# func_ltwrapper_p file
+# True iff FILE is a libtool wrapper script or wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_p ()
+{
+ func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
+}
+
+
+# func_execute_cmds commands fail_cmd
+# Execute tilde-delimited COMMANDS.
+# If FAIL_CMD is given, eval that upon failure.
+# FAIL_CMD may read-access the current command in variable CMD!
+func_execute_cmds ()
+{
+ $opt_debug
+ save_ifs=$IFS; IFS='~'
+ for cmd in $1; do
+ IFS=$save_ifs
+ eval cmd=\"$cmd\"
+ func_show_eval "$cmd" "${2-:}"
+ done
+ IFS=$save_ifs
+}
+
+
+# func_source file
+# Source FILE, adding directory component if necessary.
+# Note that it is not necessary on cygwin/mingw to append a dot to
+# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
+# behavior happens only for exec(3), not for open(2)! Also, sourcing
+# `FILE.' does not work on cygwin managed mounts.
+func_source ()
+{
+ $opt_debug
+ case $1 in
+ */* | *\\*) . "$1" ;;
+ *) . "./$1" ;;
+ esac
+}
+
+
+# func_resolve_sysroot PATH
+# Replace a leading = in PATH with a sysroot. Store the result into
+# func_resolve_sysroot_result
+func_resolve_sysroot ()
+{
+ func_resolve_sysroot_result=$1
+ case $func_resolve_sysroot_result in
+ =*)
+ func_stripname '=' '' "$func_resolve_sysroot_result"
+ func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
+ ;;
+ esac
+}
+
+# func_replace_sysroot PATH
+# If PATH begins with the sysroot, replace it with = and
+# store the result into func_replace_sysroot_result.
+func_replace_sysroot ()
+{
+ case "$lt_sysroot:$1" in
+ ?*:"$lt_sysroot"*)
+ func_stripname "$lt_sysroot" '' "$1"
+ func_replace_sysroot_result="=$func_stripname_result"
+ ;;
+ *)
+ # Including no sysroot.
+ func_replace_sysroot_result=$1
+ ;;
+ esac
+}
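+# Illustrative example (the sysroot path is hypothetical):
+#   lt_sysroot=/opt/cross/sysroot
+#   func_resolve_sysroot "=/usr/lib"
+#   # $func_resolve_sysroot_result => /opt/cross/sysroot/usr/lib
+#   func_replace_sysroot "/opt/cross/sysroot/usr/lib"
+#   # $func_replace_sysroot_result => =/usr/lib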
+
+# func_infer_tag arg
+# Infer tagged configuration to use if any are available and
+# if one wasn't chosen via the "--tag" command line option.
+# Only attempt this if the compiler in the base compile
+# command doesn't match the default compiler.
+# arg is usually of the form 'gcc ...'
+func_infer_tag ()
+{
+ $opt_debug
+ if test -n "$available_tags" && test -z "$tagname"; then
+ CC_quoted=
+ for arg in $CC; do
+ func_append_quoted CC_quoted "$arg"
+ done
+ CC_expanded=`func_echo_all $CC`
+ CC_quoted_expanded=`func_echo_all $CC_quoted`
+ case $@ in
+ # Blanks in the command may have been stripped by the calling shell,
+ # but not from the CC environment variable when configure was run.
+ " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
+ # Blanks at the start of $base_compile will cause this to fail
+ # if we don't check for them as well.
+ *)
+ for z in $available_tags; do
+ if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
+ # Evaluate the configuration.
+ eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
+ CC_quoted=
+ for arg in $CC; do
+ # Double-quote args containing other shell metacharacters.
+ func_append_quoted CC_quoted "$arg"
+ done
+ CC_expanded=`func_echo_all $CC`
+ CC_quoted_expanded=`func_echo_all $CC_quoted`
+ case "$@ " in
+ " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
+ # The compiler in the base compile command matches
+ # the one in the tagged configuration.
+ # Assume this is the tagged configuration we want.
+ tagname=$z
+ break
+ ;;
+ esac
+ fi
+ done
+ # If $tagname still isn't set, then no tagged configuration
+ # was found, so let the user know that the "--tag" command
+ # line option must be used.
+ if test -z "$tagname"; then
+ func_echo "unable to infer tagged configuration"
+ func_fatal_error "specify a tag with \`--tag'"
+# else
+# func_verbose "using $tagname tagged configuration"
+ fi
+ ;;
+ esac
+ fi
+}
+
+
+
+# func_write_libtool_object output_name pic_name nonpic_name
+# Create a libtool object file (analogous to a ".la" file),
+# but don't create it if we're doing a dry run.
+func_write_libtool_object ()
+{
+ write_libobj=${1}
+ if test "$build_libtool_libs" = yes; then
+ write_lobj=\'${2}\'
+ else
+ write_lobj=none
+ fi
+
+ if test "$build_old_libs" = yes; then
+ write_oldobj=\'${3}\'
+ else
+ write_oldobj=none
+ fi
+
+ $opt_dry_run || {
+ cat >${write_libobj}T <<EOF
+# $write_libobj - a libtool object file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# Name of the PIC object.
+pic_object=$write_lobj
+
+# Name of the non-PIC object
+non_pic_object=$write_oldobj
+
+EOF
+ $MV "${write_libobj}T" "${write_libobj}"
+ }
+}
+
+
+##################################################
+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS #
+##################################################
+
+# func_convert_core_file_wine_to_w32 ARG
+# Helper function used by file name conversion functions when $build is *nix,
+# and $host is mingw, cygwin, or some other w32 environment. Relies on a
+# correctly configured wine environment available, with the winepath program
+# in $build's $PATH.
+#
+# ARG is the $build file name to be converted to w32 format.
+# Result is available in $func_convert_core_file_wine_to_w32_result, and will
+# be empty on error (or when ARG is empty)
+func_convert_core_file_wine_to_w32 ()
+{
+ $opt_debug
+ func_convert_core_file_wine_to_w32_result="$1"
+ if test -n "$1"; then
+ # Unfortunately, winepath does not exit with a non-zero error code, so we
+ # are forced to check the contents of stdout. On the other hand, if the
+ # command is not found, the shell will set an exit code of 127 and print
+ # *an error message* to stdout. So we must check for both error code of
+ # zero AND non-empty stdout, which explains the odd construction:
+ func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null`
+ if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then
+ func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
+ $SED -e "$lt_sed_naive_backslashify"`
+ else
+ func_convert_core_file_wine_to_w32_result=
+ fi
+ fi
+}
+# end: func_convert_core_file_wine_to_w32
+
+
+# func_convert_core_path_wine_to_w32 ARG
+# Helper function used by path conversion functions when $build is *nix, and
+# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
+# configured wine environment available, with the winepath program in $build's
+# $PATH. Assumes ARG has no leading or trailing path separator characters.
+#
+# ARG is path to be converted from $build format to win32.
+# Result is available in $func_convert_core_path_wine_to_w32_result.
+# Unconvertible file (directory) names in ARG are skipped; if no directory names
+# are convertible, then the result may be empty.
+func_convert_core_path_wine_to_w32 ()
+{
+ $opt_debug
+ # unfortunately, winepath doesn't convert paths, only file names
+ func_convert_core_path_wine_to_w32_result=""
+ if test -n "$1"; then
+ oldIFS=$IFS
+ IFS=:
+ for func_convert_core_path_wine_to_w32_f in $1; do
+ IFS=$oldIFS
+ func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
+ if test -n "$func_convert_core_file_wine_to_w32_result" ; then
+ if test -z "$func_convert_core_path_wine_to_w32_result"; then
+ func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result"
+ else
+ func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
+ fi
+ fi
+ done
+ IFS=$oldIFS
+ fi
+}
+# end: func_convert_core_path_wine_to_w32
+
+
+# func_cygpath ARGS...
+# Wrapper around calling the cygpath program via LT_CYGPATH. This is used when
+# (1) $build is *nix and Cygwin is hosted via a wine environment; or (2)
+# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or
+# (2), returns the Cygwin file name or path in func_cygpath_result (input
+# file name or path is assumed to be in w32 format, as previously converted
+# from $build's *nix or MSYS format). In case (3), returns the w32 file name
+# or path in func_cygpath_result (input file name or path is assumed to be in
+# Cygwin format). Returns an empty string on error.
+#
+# ARGS are passed to cygpath, with the last one being the file name or path to
+# be converted.
+#
+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
+# environment variable; do not put it in $PATH.
+func_cygpath ()
+{
+ $opt_debug
+ if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
+ func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null`
+ if test "$?" -ne 0; then
+ # on failure, ensure result is empty
+ func_cygpath_result=
+ fi
+ else
+ func_cygpath_result=
+ func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'"
+ fi
+}
+#end: func_cygpath
+
+
+# func_convert_core_msys_to_w32 ARG
+# Convert file name or path ARG from MSYS format to w32 format. Return
+# result in func_convert_core_msys_to_w32_result.
+func_convert_core_msys_to_w32 ()
+{
+ $opt_debug
+ # awkward: cmd appends spaces to result
+ func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
+ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+}
+#end: func_convert_core_msys_to_w32
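+# Illustrative example (assumes an MSYS $build, where invoking the native
+# cmd.exe makes MSYS translate the POSIX-style argument automatically):
+#   func_convert_core_msys_to_w32 "/c/foo/bar"
+#   # $func_convert_core_msys_to_w32_result => c:\foo\bar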
+
+
+# func_convert_file_check ARG1 ARG2
+# Verify that ARG1 (a file name in $build format) was converted to $host
+# format in ARG2. Otherwise, emit an error message, but continue (resetting
+# func_to_host_file_result to ARG1).
+func_convert_file_check ()
+{
+ $opt_debug
+ if test -z "$2" && test -n "$1" ; then
+ func_error "Could not determine host file name corresponding to"
+ func_error " \`$1'"
+ func_error "Continuing, but uninstalled executables may not work."
+ # Fallback:
+ func_to_host_file_result="$1"
+ fi
+}
+# end func_convert_file_check
+
+
+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
+# Verify that FROM_PATH (a path in $build format) was converted to $host
+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
+# func_to_host_path_result to a simplistic fallback value (see below).
+func_convert_path_check ()
+{
+ $opt_debug
+ if test -z "$4" && test -n "$3"; then
+ func_error "Could not determine the host path corresponding to"
+ func_error " \`$3'"
+ func_error "Continuing, but uninstalled executables may not work."
+ # Fallback. This is a deliberately simplistic "conversion" and
+ # should not be "improved". See libtool.info.
+ if test "x$1" != "x$2"; then
+ lt_replace_pathsep_chars="s|$1|$2|g"
+ func_to_host_path_result=`echo "$3" |
+ $SED -e "$lt_replace_pathsep_chars"`
+ else
+ func_to_host_path_result="$3"
+ fi
+ fi
+}
+# end func_convert_path_check
+
+
+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
+# and appending REPL if ORIG matches BACKPAT.
+func_convert_path_front_back_pathsep ()
+{
+ $opt_debug
+ case $4 in
+ $1 ) func_to_host_path_result="$3$func_to_host_path_result"
+ ;;
+ esac
+ case $4 in
+ $2 ) func_append func_to_host_path_result "$3"
+ ;;
+ esac
+}
+# end func_convert_path_front_back_pathsep
+
+
+##################################################
+# $build to $host FILE NAME CONVERSION FUNCTIONS #
+##################################################
+# invoked via `$to_host_file_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# Result will be available in $func_to_host_file_result.
+
+
+# func_to_host_file ARG
+# Converts the file name ARG from $build format to $host format. Return result
+# in func_to_host_file_result.
+func_to_host_file ()
+{
+ $opt_debug
+ $to_host_file_cmd "$1"
+}
+# end func_to_host_file
+
+
+# func_to_tool_file ARG LAZY
+# converts the file name ARG from $build format to toolchain format. Return
+# result in func_to_tool_file_result. If the conversion in use is listed
+# in (the comma separated) LAZY, no conversion takes place.
+func_to_tool_file ()
+{
+ $opt_debug
+ case ,$2, in
+ *,"$to_tool_file_cmd",*)
+ func_to_tool_file_result=$1
+ ;;
+ *)
+ $to_tool_file_cmd "$1"
+ func_to_tool_file_result=$func_to_host_file_result
+ ;;
+ esac
+}
+# end func_to_tool_file
+
+
+# func_convert_file_noop ARG
+# Copy ARG to func_to_host_file_result.
+func_convert_file_noop ()
+{
+ func_to_host_file_result="$1"
+}
+# end func_convert_file_noop
+
+
+# func_convert_file_msys_to_w32 ARG
+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper. Returns result in
+# func_to_host_file_result.
+func_convert_file_msys_to_w32 ()
+{
+ $opt_debug
+ func_to_host_file_result="$1"
+ if test -n "$1"; then
+ func_convert_core_msys_to_w32 "$1"
+ func_to_host_file_result="$func_convert_core_msys_to_w32_result"
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_w32
+
+
+# func_convert_file_cygwin_to_w32 ARG
+# Convert file name ARG from Cygwin to w32 format. Returns result in
+# func_to_host_file_result.
+func_convert_file_cygwin_to_w32 ()
+{
+ $opt_debug
+ func_to_host_file_result="$1"
+ if test -n "$1"; then
+ # because $build is cygwin, we call "the" cygpath in $PATH; no need to use
+ # LT_CYGPATH in this case.
+ func_to_host_file_result=`cygpath -m "$1"`
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_cygwin_to_w32
+
+
+# func_convert_file_nix_to_w32 ARG
+# Convert file name ARG from *nix to w32 format. Requires a wine environment
+# and a working winepath. Returns result in func_to_host_file_result.
+func_convert_file_nix_to_w32 ()
+{
+ $opt_debug
+ func_to_host_file_result="$1"
+ if test -n "$1"; then
+ func_convert_core_file_wine_to_w32 "$1"
+ func_to_host_file_result="$func_convert_core_file_wine_to_w32_result"
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_w32
+
+
+# func_convert_file_msys_to_cygwin ARG
+# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
+# Returns result in func_to_host_file_result.
+func_convert_file_msys_to_cygwin ()
+{
+ $opt_debug
+ func_to_host_file_result="$1"
+ if test -n "$1"; then
+ func_convert_core_msys_to_w32 "$1"
+ func_cygpath -u "$func_convert_core_msys_to_w32_result"
+ func_to_host_file_result="$func_cygpath_result"
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_msys_to_cygwin
+
+
+# func_convert_file_nix_to_cygwin ARG
+# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed
+# in a wine environment, working winepath, and LT_CYGPATH set. Returns result
+# in func_to_host_file_result.
+func_convert_file_nix_to_cygwin ()
+{
+ $opt_debug
+ func_to_host_file_result="$1"
+ if test -n "$1"; then
+ # convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
+ func_convert_core_file_wine_to_w32 "$1"
+ func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
+ func_to_host_file_result="$func_cygpath_result"
+ fi
+ func_convert_file_check "$1" "$func_to_host_file_result"
+}
+# end func_convert_file_nix_to_cygwin
+
+
+#############################################
+# $build to $host PATH CONVERSION FUNCTIONS #
+#############################################
+# invoked via `$to_host_path_cmd ARG'
+#
+# In each case, ARG is the path to be converted from $build to $host format.
+# The result will be available in $func_to_host_path_result.
+#
+# Path separators are also converted from $build format to $host format. If
+# ARG begins or ends with a path separator character, it is preserved (but
+# converted to $host format) on output.
+#
+# All path conversion functions are named using the following convention:
+# file name conversion function : func_convert_file_X_to_Y ()
+# path conversion function : func_convert_path_X_to_Y ()
+# where, for any given $build/$host combination, the 'X_to_Y' value is the
+# same. If conversion functions are added for new $build/$host combinations,
+# the two new functions must follow this pattern, or func_init_to_host_path_cmd
+# will break.
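+# For example, func_convert_file_cygwin_to_w32 and
+# func_convert_path_cygwin_to_w32 share the "cygwin_to_w32" suffix, which is
+# how func_init_to_host_path_cmd below derives one name from the other.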
+
+
+# func_init_to_host_path_cmd
+# Ensures that function "pointer" variable $to_host_path_cmd is set to the
+# appropriate value, based on the value of $to_host_file_cmd.
+to_host_path_cmd=
+func_init_to_host_path_cmd ()
+{
+ $opt_debug
+ if test -z "$to_host_path_cmd"; then
+ func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
+ to_host_path_cmd="func_convert_path_${func_stripname_result}"
+ fi
+}
+
+
+# func_to_host_path ARG
+# Converts the path ARG from $build format to $host format. Return result
+# in func_to_host_path_result.
+func_to_host_path ()
+{
+ $opt_debug
+ func_init_to_host_path_cmd
+ $to_host_path_cmd "$1"
+}
+# end func_to_host_path
+
+
+# func_convert_path_noop ARG
+# Copy ARG to func_to_host_path_result.
+func_convert_path_noop ()
+{
+ func_to_host_path_result="$1"
+}
+# end func_convert_path_noop
+
+
+# func_convert_path_msys_to_w32 ARG
+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
+# conversion to w32 is not available inside the cwrapper. Returns result in
+# func_to_host_path_result.
+func_convert_path_msys_to_w32 ()
+{
+ $opt_debug
+ func_to_host_path_result="$1"
+ if test -n "$1"; then
+ # Remove leading and trailing path separator characters from ARG. MSYS
+ # behavior is inconsistent here; cygpath turns them into '.;' and ';.';
+ # and winepath ignores them completely.
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+ func_to_host_path_result="$func_convert_core_msys_to_w32_result"
+ func_convert_path_check : ";" \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+ fi
+}
+# end func_convert_path_msys_to_w32
+
+
+# func_convert_path_cygwin_to_w32 ARG
+# Convert path ARG from Cygwin to w32 format. Returns result in
+# func_to_host_path_result.
+func_convert_path_cygwin_to_w32 ()
+{
+ $opt_debug
+ func_to_host_path_result="$1"
+ if test -n "$1"; then
+ # See func_convert_path_msys_to_w32:
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
+ func_convert_path_check : ";" \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+ fi
+}
+# end func_convert_path_cygwin_to_w32
+
+
+# func_convert_path_nix_to_w32 ARG
+# Convert path ARG from *nix to w32 format. Requires a wine environment and
+# a working winepath. Returns result in func_to_host_path_result.
+func_convert_path_nix_to_w32 ()
+{
+ $opt_debug
+ func_to_host_path_result="$1"
+ if test -n "$1"; then
+ # See func_convert_path_msys_to_w32:
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+ func_to_host_path_result="$func_convert_core_path_wine_to_w32_result"
+ func_convert_path_check : ";" \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
+ fi
+}
+# end func_convert_path_nix_to_w32
+
+
+# func_convert_path_msys_to_cygwin ARG
+# Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
+# Returns result in func_to_host_path_result.
+func_convert_path_msys_to_cygwin ()
+{
+ $opt_debug
+ func_to_host_path_result="$1"
+ if test -n "$1"; then
+ # See func_convert_path_msys_to_w32:
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
+ func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
+ func_to_host_path_result="$func_cygpath_result"
+ func_convert_path_check : : \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+ fi
+}
+# end func_convert_path_msys_to_cygwin
+
+
+# func_convert_path_nix_to_cygwin ARG
+# Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in a
+# wine environment, working winepath, and LT_CYGPATH set. Returns result in
+# func_to_host_path_result.
+func_convert_path_nix_to_cygwin ()
+{
+ $opt_debug
+ func_to_host_path_result="$1"
+ if test -n "$1"; then
+ # Remove leading and trailing path separator characters from
+ # ARG. msys behavior is inconsistent here, cygpath turns them
+ # into '.;' and ';.', and winepath ignores them completely.
+ func_stripname : : "$1"
+ func_to_host_path_tmp1=$func_stripname_result
+ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
+ func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
+ func_to_host_path_result="$func_cygpath_result"
+ func_convert_path_check : : \
+ "$func_to_host_path_tmp1" "$func_to_host_path_result"
+ func_convert_path_front_back_pathsep ":*" "*:" : "$1"
+ fi
+}
+# end func_convert_path_nix_to_cygwin
+
+
+# func_mode_compile arg...
+func_mode_compile ()
+{
+ $opt_debug
+ # Get the compilation command and the source file.
+ base_compile=
+ srcfile="$nonopt" # always keep a non-empty value in "srcfile"
+ suppress_opt=yes
+ suppress_output=
+ arg_mode=normal
+ libobj=
+ later=
+ pie_flag=
+
+ for arg
+ do
+ case $arg_mode in
+ arg )
+ # do not "continue". Instead, add this to base_compile
+ lastarg="$arg"
+ arg_mode=normal
+ ;;
+
+ target )
+ libobj="$arg"
+ arg_mode=normal
+ continue
+ ;;
+
+ normal )
+ # Accept any command-line options.
+ case $arg in
+ -o)
+ test -n "$libobj" && \
+ func_fatal_error "you cannot specify \`-o' more than once"
+ arg_mode=target
+ continue
+ ;;
+
+ -pie | -fpie | -fPIE)
+ func_append pie_flag " $arg"
+ continue
+ ;;
+
+ -shared | -static | -prefer-pic | -prefer-non-pic)
+ func_append later " $arg"
+ continue
+ ;;
+
+ -no-suppress)
+ suppress_opt=no
+ continue
+ ;;
+
+ -Xcompiler)
+ arg_mode=arg # the next one goes into the "base_compile" arg list
+ continue # The current "srcfile" will either be retained or
+ ;; # replaced later. I would guess that would be a bug.
+
+ -Wc,*)
+ func_stripname '-Wc,' '' "$arg"
+ args=$func_stripname_result
+ lastarg=
+ save_ifs="$IFS"; IFS=','
+ for arg in $args; do
+ IFS="$save_ifs"
+ func_append_quoted lastarg "$arg"
+ done
+ IFS="$save_ifs"
+ func_stripname ' ' '' "$lastarg"
+ lastarg=$func_stripname_result
+
+ # Add the arguments to base_compile.
+ func_append base_compile " $lastarg"
+ continue
+ ;;
+
+ *)
+ # Accept the current argument as the source file.
+ # The previous "srcfile" becomes the current argument.
+ #
+ lastarg="$srcfile"
+ srcfile="$arg"
+ ;;
+ esac # case $arg
+ ;;
+ esac # case $arg_mode
+
+ # Aesthetically quote the previous argument.
+ func_append_quoted base_compile "$lastarg"
+ done # for arg
+
+ case $arg_mode in
+ arg)
+ func_fatal_error "you must specify an argument for -Xcompile"
+ ;;
+ target)
+ func_fatal_error "you must specify a target with \`-o'"
+ ;;
+ *)
+ # Get the name of the library object.
+ test -z "$libobj" && {
+ func_basename "$srcfile"
+ libobj="$func_basename_result"
+ }
+ ;;
+ esac
+
+ # Recognize several different file suffixes.
+ # If the user specifies -o file.o, it is replaced with file.lo
+ case $libobj in
+ *.[cCFSifmso] | \
+ *.ada | *.adb | *.ads | *.asm | \
+ *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
+ *.[fF][09]? | *.for | *.java | *.obj | *.sx | *.cu | *.cup)
+ func_xform "$libobj"
+ libobj=$func_xform_result
+ ;;
+ esac
+
+ case $libobj in
+ *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
+ *)
+ func_fatal_error "cannot determine name of library object from \`$libobj'"
+ ;;
+ esac
+
+ func_infer_tag $base_compile
+
+ for arg in $later; do
+ case $arg in
+ -shared)
+ test "$build_libtool_libs" != yes && \
+ func_fatal_configuration "can not build a shared library"
+ build_old_libs=no
+ continue
+ ;;
+
+ -static)
+ build_libtool_libs=no
+ build_old_libs=yes
+ continue
+ ;;
+
+ -prefer-pic)
+ pic_mode=yes
+ continue
+ ;;
+
+ -prefer-non-pic)
+ pic_mode=no
+ continue
+ ;;
+ esac
+ done
+
+ func_quote_for_eval "$libobj"
+ test "X$libobj" != "X$func_quote_for_eval_result" \
+ && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \
+ && func_warning "libobj name \`$libobj' may not contain shell special characters."
+ func_dirname_and_basename "$obj" "/" ""
+ objname="$func_basename_result"
+ xdir="$func_dirname_result"
+ lobj=${xdir}$objdir/$objname
+
+ test -z "$base_compile" && \
+ func_fatal_help "you must specify a compilation command"
+
+ # Delete any leftover library objects.
+ if test "$build_old_libs" = yes; then
+ removelist="$obj $lobj $libobj ${libobj}T"
+ else
+ removelist="$lobj $libobj ${libobj}T"
+ fi
+
+ # On Cygwin there's no "real" PIC flag so we must build both object types
+ case $host_os in
+ cygwin* | mingw* | pw32* | os2* | cegcc*)
+ pic_mode=default
+ ;;
+ esac
+ if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+ # non-PIC code in shared libraries is not supported
+ pic_mode=default
+ fi
+
+ # Calculate the filename of the output object if the compiler does
+ # not support -o with -c.
+ if test "$compiler_c_o" = no; then
+ output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext}
+ lockfile="$output_obj.lock"
+ else
+ output_obj=
+ need_locks=no
+ lockfile=
+ fi
+
+ # Lock this critical section if it is needed.
+ # We use this script file to make the link; it avoids creating a new file.
+ if test "$need_locks" = yes; then
+ until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+ func_echo "Waiting for $lockfile to be removed"
+ sleep 2
+ done
+ elif test "$need_locks" = warn; then
+ if test -f "$lockfile"; then
+ $ECHO "\
+*** ERROR, $lockfile exists and contains:
+`cat $lockfile 2>/dev/null`
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together. If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+ $opt_dry_run || $RM $removelist
+ exit $EXIT_FAILURE
+ fi
+ func_append removelist " $output_obj"
+ $ECHO "$srcfile" > "$lockfile"
+ fi
+
+ $opt_dry_run || $RM $removelist
+ func_append removelist " $lockfile"
+ trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+ func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
+ srcfile=$func_to_tool_file_result
+ func_quote_for_eval "$srcfile"
+ qsrcfile=$func_quote_for_eval_result
+
+ # Only build a PIC object if we are building libtool libraries.
+ if test "$build_libtool_libs" = yes; then
+ # Without this assignment, base_compile gets emptied.
+ fbsd_hideous_sh_bug=$base_compile
+
+ if test "$pic_mode" != no; then
+ command="$base_compile $qsrcfile $pic_flag"
+ else
+ # Don't build PIC code
+ command="$base_compile $qsrcfile"
+ fi
+
+ func_mkdir_p "$xdir$objdir"
+
+ if test -z "$output_obj"; then
+ # Place PIC objects in $objdir
+ func_append command " -o $lobj"
+ fi
+
+ func_show_eval_locale "$command" \
+ 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
+
+ if test "$need_locks" = warn &&
+ test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+ $ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together. If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+ $opt_dry_run || $RM $removelist
+ exit $EXIT_FAILURE
+ fi
+
+ # Just move the object if needed, then go on to compile the next one
+ if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
+ func_show_eval '$MV "$output_obj" "$lobj"' \
+ 'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+ fi
+
+ # Allow error messages only from the first compilation.
+ if test "$suppress_opt" = yes; then
+ suppress_output=' >/dev/null 2>&1'
+ fi
+ fi
+
+ # Only build a position-dependent object if we build old libraries.
+ if test "$build_old_libs" = yes; then
+ if test "$pic_mode" != yes; then
+ # Don't build PIC code
+ command="$base_compile $qsrcfile$pie_flag"
+ else
+ command="$base_compile $qsrcfile $pic_flag"
+ fi
+ if test "$compiler_c_o" = yes; then
+ func_append command " -o $obj"
+ fi
+
+ # Suppress compiler output if we already did a PIC compilation.
+ func_append command "$suppress_output"
+ func_show_eval_locale "$command" \
+ '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+ if test "$need_locks" = warn &&
+ test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+ $ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together. If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+ $opt_dry_run || $RM $removelist
+ exit $EXIT_FAILURE
+ fi
+
+ # Just move the object if needed
+ if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
+ func_show_eval '$MV "$output_obj" "$obj"' \
+ 'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+ fi
+ fi
+
+ $opt_dry_run || {
+ func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
+
+ # Unlock the critical section if it was locked
+ if test "$need_locks" != no; then
+ removelist=$lockfile
+ $RM "$lockfile"
+ fi
+ }
+
+ exit $EXIT_SUCCESS
+}
+
+$opt_help || {
+ test "$opt_mode" = compile && func_mode_compile ${1+"$@"}
+}
+
+func_mode_help ()
+{
+ # We need to display help for each of the modes.
+ case $opt_mode in
+ "")
+ # Generic help is extracted from the usage comments
+ # at the start of this file.
+ func_help
+ ;;
+
+ clean)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
+
+Remove files from the build directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM."
+ ;;
+
+ compile)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
+
+Compile a source file into a libtool library object.
+
+This mode accepts the following additional options:
+
+ -o OUTPUT-FILE set the output file name to OUTPUT-FILE
+ -no-suppress do not suppress compiler output for multiple passes
+ -prefer-pic try to build PIC objects only
+ -prefer-non-pic try to build non-PIC objects only
+ -shared do not build a \`.o' file suitable for static linking
+ -static only build a \`.o' file suitable for static linking
+ -Wc,FLAG pass FLAG directly to the compiler
+
+COMPILE-COMMAND is a command to be used in creating a \`standard' object file
+from the given SOURCEFILE.
+
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix \`.c' with the
+library object suffix, \`.lo'."
+ ;;
+
+ execute)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
+
+Automatically set library path, then run a program.
+
+This mode accepts the following additional options:
+
+ -dlopen FILE add the directory containing FILE to the library path
+
+This mode sets the library path environment variable according to \`-dlopen'
+flags.
+
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+
+Then, COMMAND is executed, with ARGS as arguments."
+ ;;
+
+ finish)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
+
+Complete the installation of libtool libraries.
+
+Each LIBDIR is a directory that contains libtool libraries.
+
+The commands that this mode executes may require superuser privileges. Use
+the \`--dry-run' option if you just want to see what would be executed."
+ ;;
+
+ install)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
+
+Install executables or libraries.
+
+INSTALL-COMMAND is the installation command. The first component should be
+either the \`install' or \`cp' program.
+
+The following components of INSTALL-COMMAND are treated specially:
+
+ -inst-prefix-dir PREFIX-DIR Use PREFIX-DIR as a staging area for installation
+
+The rest of the components are interpreted as arguments to that command (only
+BSD-compatible install options are recognized)."
+ ;;
+
+ link)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
+
+Link object files or libraries together to form another library, or to
+create an executable program.
+
+LINK-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+
+The following components of LINK-COMMAND are treated specially:
+
+ -all-static do not do any dynamic linking at all
+ -avoid-version do not add a version suffix if possible
+ -bindir BINDIR specify path to binaries directory (for systems where
+ libraries must be found in the PATH setting at runtime)
+ -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime
+ -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols
+ -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
+ -export-symbols SYMFILE
+ try to export only the symbols listed in SYMFILE
+ -export-symbols-regex REGEX
+ try to export only the symbols matching REGEX
+ -LLIBDIR search LIBDIR for required installed libraries
+ -lNAME OUTPUT-FILE requires the installed library libNAME
+ -module build a library that can be dlopened
+ -no-fast-install disable the fast-install mode
+ -no-install link a not-installable executable
+ -no-undefined declare that a library does not refer to external symbols
+ -o OUTPUT-FILE create OUTPUT-FILE from the specified objects
+ -objectlist FILE Use a list of object files found in FILE to specify objects
+ -precious-files-regex REGEX
+ don't remove output files matching REGEX
+ -release RELEASE specify package release information
+ -rpath LIBDIR the created library will eventually be installed in LIBDIR
+ -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries
+ -shared only do dynamic linking of libtool libraries
+ -shrext SUFFIX override the standard shared library file extension
+ -static do not do any dynamic linking of uninstalled libtool libraries
+ -static-libtool-libs
+ do not do any dynamic linking of libtool libraries
+ -version-info CURRENT[:REVISION[:AGE]]
+ specify library version info [each variable defaults to 0]
+ -weak LIBNAME declare that the target provides the LIBNAME interface
+ -Wc,FLAG
+ -Xcompiler FLAG pass linker-specific FLAG directly to the compiler
+ -Wl,FLAG
+ -Xlinker FLAG pass linker-specific FLAG directly to the linker
+ -XCClinker FLAG pass link-specific FLAG to the compiler driver (CC)
+
+All other options (arguments beginning with \`-') are ignored.
+
+Every other argument is treated as a filename. Files ending in \`.la' are
+treated as uninstalled libtool libraries, other files are standard or library
+object files.
+
+If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
+only library objects (\`.lo' files) may be specified, and \`-rpath' is
+required, except when creating a convenience library.
+
+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
+using \`ar' and \`ranlib', or on Windows using \`lib'.
+
+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
+is created, otherwise an executable program is created."
+ ;;
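+    # Illustrative example (names and version-info are hypothetical):
+    #   libtool --mode=link gcc -o libfoo.la foo.lo bar.lo \
+    #     -rpath /usr/local/lib -version-info 1:0:0
+    # builds a libtool library; omitting -rpath would instead produce a
+    # convenience library.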
+
+ uninstall)
+ $ECHO \
+"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
+
+Remove libraries from an installation directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM."
+ ;;
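+    # Illustrative example (the path is hypothetical):
+    #   libtool --mode=uninstall rm /usr/local/lib/libfoo.la
+    # deletes libfoo.la and the shared/static files recorded inside it.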
+
+ *)
+ func_fatal_help "invalid operation mode \`$opt_mode'"
+ ;;
+ esac
+
+ echo
+ $ECHO "Try \`$progname --help' for more information about other modes."
+}
+
+# Now that we've collected a possible --mode arg, show help if necessary
+if $opt_help; then
+ if test "$opt_help" = :; then
+ func_mode_help
+ else
+ {
+ func_help noexit
+ for opt_mode in compile link execute install finish uninstall clean; do
+ func_mode_help
+ done
+ } | sed -n '1p; 2,$s/^Usage:/ or: /p'
+ {
+ func_help noexit
+ for opt_mode in compile link execute install finish uninstall clean; do
+ echo
+ func_mode_help
+ done
+ } |
+ sed '1d
+ /^When reporting/,/^Report/{
+ H
+ d
+ }
+ $x
+ /information about other modes/d
+ /more detailed .*MODE/d
+ s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
+ fi
+ exit $?
+fi
+
+
+# func_mode_execute arg...
+func_mode_execute ()
+{
+ $opt_debug
+ # The first argument is the command name.
+ cmd="$nonopt"
+ test -z "$cmd" && \
+ func_fatal_help "you must specify a COMMAND"
+
+ # Handle -dlopen flags immediately.
+ for file in $opt_dlopen; do
+ test -f "$file" \
+ || func_fatal_help "\`$file' is not a file"
+
+ dir=
+ case $file in
+ *.la)
+ func_resolve_sysroot "$file"
+ file=$func_resolve_sysroot_result
+
+ # Check to see that this really is a libtool archive.
+ func_lalib_unsafe_p "$file" \
+ || func_fatal_help "\`$file' is not a valid libtool archive"
+
+ # Read the libtool library.
+ dlname=
+ library_names=
+ func_source "$file"
+
+ # Skip this library if it cannot be dlopened.
+ if test -z "$dlname"; then
+ # Warn if it was a shared library.
+ test -n "$library_names" && \
+ func_warning "\`$file' was not linked with \`-export-dynamic'"
+ continue
+ fi
+
+ func_dirname "$file" "" "."
+ dir="$func_dirname_result"
+
+ if test -f "$dir/$objdir/$dlname"; then
+ func_append dir "/$objdir"
+ else
+ if test ! -f "$dir/$dlname"; then
+ func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
+ fi
+ fi
+ ;;
+
+ *.lo)
+ # Just add the directory containing the .lo file.
+ func_dirname "$file" "" "."
+ dir="$func_dirname_result"
+ ;;
+
+ *)
+ func_warning "\`-dlopen' is ignored for non-libtool libraries and objects"
+ continue
+ ;;
+ esac
+
+ # Get the absolute pathname.
+ absdir=`cd "$dir" && pwd`
+ test -n "$absdir" && dir="$absdir"
+
+ # Now add the directory to shlibpath_var.
+ if eval "test -z \"\$$shlibpath_var\""; then
+ eval "$shlibpath_var=\"\$dir\""
+ else
+ eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+ fi
+ done
+
+ # This variable tells wrapper scripts just to set shlibpath_var
+ # rather than running their programs.
+ libtool_execute_magic="$magic"
+
+ # Check if any of the arguments is a wrapper script.
+ args=
+ for file
+ do
+ case $file in
+ -* | *.la | *.lo ) ;;
+ *)
+ # Do a test to see if this is really a libtool program.
+ if func_ltwrapper_script_p "$file"; then
+ func_source "$file"
+ # Transform arg to wrapped name.
+ file="$progdir/$program"
+ elif func_ltwrapper_executable_p "$file"; then
+ func_ltwrapper_scriptname "$file"
+ func_source "$func_ltwrapper_scriptname_result"
+ # Transform arg to wrapped name.
+ file="$progdir/$program"
+ fi
+ ;;
+ esac
+ # Quote arguments (to preserve shell metacharacters).
+ func_append_quoted args "$file"
+ done
+
+ if test "X$opt_dry_run" = Xfalse; then
+ if test -n "$shlibpath_var"; then
+ # Export the shlibpath_var.
+ eval "export $shlibpath_var"
+ fi
+
+ # Restore saved environment variables
+ for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+ do
+ eval "if test \"\${save_$lt_var+set}\" = set; then
+ $lt_var=\$save_$lt_var; export $lt_var
+ else
+ $lt_unset $lt_var
+ fi"
+ done
+
+ # Now prepare to actually exec the command.
+ exec_cmd="\$cmd$args"
+ else
+ # Display what would be done.
+ if test -n "$shlibpath_var"; then
+ eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
+ echo "export $shlibpath_var"
+ fi
+ $ECHO "$cmd$args"
+ exit $EXIT_SUCCESS
+ fi
+}
+
+test "$opt_mode" = execute && func_mode_execute ${1+"$@"}
+
+
+# func_mode_finish arg...
+func_mode_finish ()
+{
+ $opt_debug
+ libs=
+ libdirs=
+ admincmds=
+
+ for opt in "$nonopt" ${1+"$@"}
+ do
+ if test -d "$opt"; then
+ func_append libdirs " $opt"
+
+ elif test -f "$opt"; then
+ if func_lalib_unsafe_p "$opt"; then
+ func_append libs " $opt"
+ else
+ func_warning "\`$opt' is not a valid libtool archive"
+ fi
+
+ else
+ func_fatal_error "invalid argument \`$opt'"
+ fi
+ done
+
+ if test -n "$libs"; then
+ if test -n "$lt_sysroot"; then
+ sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
+ sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
+ else
+ sysroot_cmd=
+ fi
+
+ # Remove sysroot references
+ if $opt_dry_run; then
+ for lib in $libs; do
+ echo "removing references to $lt_sysroot and \`=' prefixes from $lib"
+ done
+ else
+ tmpdir=`func_mktempdir`
+ for lib in $libs; do
+ sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
+ > $tmpdir/tmp-la
+ mv -f $tmpdir/tmp-la $lib
+ done
+ ${RM}r "$tmpdir"
+ fi
+ fi
+
+ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+ for libdir in $libdirs; do
+ if test -n "$finish_cmds"; then
+ # Do each command in the finish commands.
+ func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
+'"$cmd"'"'
+ fi
+ if test -n "$finish_eval"; then
+ # Do the single finish_eval.
+ eval cmds=\"$finish_eval\"
+ $opt_dry_run || eval "$cmds" || func_append admincmds "
+ $cmds"
+ fi
+ done
+ fi
+
+ # Exit here if they wanted silent mode.
+ $opt_silent && exit $EXIT_SUCCESS
+
+ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+ echo "----------------------------------------------------------------------"
+ echo "Libraries have been installed in:"
+ for libdir in $libdirs; do
+ $ECHO " $libdir"
+ done
+ echo
+ echo "If you ever happen to want to link against installed libraries"
+ echo "in a given directory, LIBDIR, you must either use libtool, and"
+ echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
+ echo "flag during linking and do at least one of the following:"
+ if test -n "$shlibpath_var"; then
+ echo " - add LIBDIR to the \`$shlibpath_var' environment variable"
+ echo " during execution"
+ fi
+ if test -n "$runpath_var"; then
+ echo " - add LIBDIR to the \`$runpath_var' environment variable"
+ echo " during linking"
+ fi
+ if test -n "$hardcode_libdir_flag_spec"; then
+ libdir=LIBDIR
+ eval flag=\"$hardcode_libdir_flag_spec\"
+
+ $ECHO " - use the \`$flag' linker flag"
+ fi
+ if test -n "$admincmds"; then
+ $ECHO " - have your system administrator run these commands:$admincmds"
+ fi
+ if test -f /etc/ld.so.conf; then
+ echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
+ fi
+ echo
+
+ echo "See any operating system documentation about shared libraries for"
+ case $host in
+ solaris2.[6789]|solaris2.1[0-9])
+ echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+ echo "pages."
+ ;;
+ *)
+ echo "more information, such as the ld(1) and ld.so(8) manual pages."
+ ;;
+ esac
+ echo "----------------------------------------------------------------------"
+ fi
+ exit $EXIT_SUCCESS
+}
+
+test "$opt_mode" = finish && func_mode_finish ${1+"$@"}
+
+
+# func_mode_install arg...
+func_mode_install ()
+{
+ $opt_debug
+ # There may be an optional sh(1) argument at the beginning of
+ # install_prog (especially on Windows NT).
+ if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
+ # Allow the use of GNU shtool's install command.
+ case $nonopt in *shtool*) :;; *) false;; esac; then
+ # Aesthetically quote it.
+ func_quote_for_eval "$nonopt"
+ install_prog="$func_quote_for_eval_result "
+ arg=$1
+ shift
+ else
+ install_prog=
+ arg=$nonopt
+ fi
+
+ # The real first argument should be the name of the installation program.
+ # Aesthetically quote it.
+ func_quote_for_eval "$arg"
+ func_append install_prog "$func_quote_for_eval_result"
+ install_shared_prog=$install_prog
+ case " $install_prog " in
+ *[\\\ /]cp\ *) install_cp=: ;;
+ *) install_cp=false ;;
+ esac
+
+ # We need to accept at least all the BSD install flags.
+ dest=
+ files=
+ opts=
+ prev=
+ install_type=
+ isdir=no
+ stripme=
+ no_mode=:
+ for arg
+ do
+ arg2=
+ if test -n "$dest"; then
+ func_append files " $dest"
+ dest=$arg
+ continue
+ fi
+
+ case $arg in
+ -d) isdir=yes ;;
+ -f)
+ if $install_cp; then :; else
+ prev=$arg
+ fi
+ ;;
+ -g | -m | -o)
+ prev=$arg
+ ;;
+ -s)
+ stripme=" -s"
+ continue
+ ;;
+ -*)
+ ;;
+ *)
+ # If the previous option needed an argument, then skip it.
+ if test -n "$prev"; then
+ if test "x$prev" = x-m && test -n "$install_override_mode"; then
+ arg2=$install_override_mode
+ no_mode=false
+ fi
+ prev=
+ else
+ dest=$arg
+ continue
+ fi
+ ;;
+ esac
+
+ # Aesthetically quote the argument.
+ func_quote_for_eval "$arg"
+ func_append install_prog " $func_quote_for_eval_result"
+ if test -n "$arg2"; then
+ func_quote_for_eval "$arg2"
+ fi
+ func_append install_shared_prog " $func_quote_for_eval_result"
+ done
+
+ test -z "$install_prog" && \
+ func_fatal_help "you must specify an install program"
+
+ test -n "$prev" && \
+ func_fatal_help "the \`$prev' option requires an argument"
+
+ if test -n "$install_override_mode" && $no_mode; then
+ if $install_cp; then :; else
+ func_quote_for_eval "$install_override_mode"
+ func_append install_shared_prog " -m $func_quote_for_eval_result"
+ fi
+ fi
+
+ if test -z "$files"; then
+ if test -z "$dest"; then
+ func_fatal_help "no file or destination specified"
+ else
+ func_fatal_help "you must specify a destination"
+ fi
+ fi
+
+ # Strip any trailing slash from the destination.
+ func_stripname '' '/' "$dest"
+ dest=$func_stripname_result
+
+ # Check to see that the destination is a directory.
+ test -d "$dest" && isdir=yes
+ if test "$isdir" = yes; then
+ destdir="$dest"
+ destname=
+ else
+ func_dirname_and_basename "$dest" "" "."
+ destdir="$func_dirname_result"
+ destname="$func_basename_result"
+
+ # Not a directory, so check to see that there is only one file specified.
+ set dummy $files; shift
+ test "$#" -gt 1 && \
+ func_fatal_help "\`$dest' is not a directory"
+ fi
+ case $destdir in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+ *)
+ for file in $files; do
+ case $file in
+ *.lo) ;;
+ *)
+ func_fatal_help "\`$destdir' must be an absolute directory name"
+ ;;
+ esac
+ done
+ ;;
+ esac
+
+ # This variable tells wrapper scripts just to set variables rather
+ # than running their programs.
+ libtool_install_magic="$magic"
+
+ staticlibs=
+ future_libdirs=
+ current_libdirs=
+ for file in $files; do
+
+ # Do each installation.
+ case $file in
+ *.$libext)
+ # Do the static libraries later.
+ func_append staticlibs " $file"
+ ;;
+
+ *.la)
+ func_resolve_sysroot "$file"
+ file=$func_resolve_sysroot_result
+
+ # Check to see that this really is a libtool archive.
+ func_lalib_unsafe_p "$file" \
+ || func_fatal_help "\`$file' is not a valid libtool archive"
+
+ library_names=
+ old_library=
+ relink_command=
+ func_source "$file"
+
+ # Add the libdir to current_libdirs if it is the destination.
+ if test "X$destdir" = "X$libdir"; then
+ case "$current_libdirs " in
+ *" $libdir "*) ;;
+ *) func_append current_libdirs " $libdir" ;;
+ esac
+ else
+ # Note the libdir as a future libdir.
+ case "$future_libdirs " in
+ *" $libdir "*) ;;
+ *) func_append future_libdirs " $libdir" ;;
+ esac
+ fi
+
+ func_dirname "$file" "/" ""
+ dir="$func_dirname_result"
+ func_append dir "$objdir"
+
+ if test -n "$relink_command"; then
+ # Determine the prefix the user has applied to our future dir.
+ inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
+
+ # Don't allow the user to place us outside of our expected
+ # location because this prevents finding dependent libraries that
+ # are installed to the same prefix.
+ # At present, this check doesn't affect windows .dll's that
+ # are installed into $libdir/../bin (currently, that works fine)
+ # but it's something to keep an eye on.
+ test "$inst_prefix_dir" = "$destdir" && \
+ func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir"
+
+ if test -n "$inst_prefix_dir"; then
+ # Stick the inst_prefix_dir data into the link command.
+ relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
+ else
+ relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
+ fi
+
+ func_warning "relinking \`$file'"
+ func_show_eval "$relink_command" \
+ 'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"'
+ fi
+
+ # See the names of the shared library.
+ set dummy $library_names; shift
+ if test -n "$1"; then
+ realname="$1"
+ shift
+
+ srcname="$realname"
+ test -n "$relink_command" && srcname="$realname"T
+
+ # Install the shared library and build the symlinks.
+ func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
+ 'exit $?'
+ tstripme="$stripme"
+ case $host_os in
+ cygwin* | mingw* | pw32* | cegcc*)
+ case $realname in
+ *.dll.a)
+ tstripme=""
+ ;;
+ esac
+ ;;
+ esac
+ if test -n "$tstripme" && test -n "$striplib"; then
+ func_show_eval "$striplib $destdir/$realname" 'exit $?'
+ fi
+
+ if test "$#" -gt 0; then
+ # Delete the old symlinks, and create new ones.
+ # Try `ln -sf' first, because the `ln' binary might depend on
+ # the symlink we replace! Solaris /bin/ln does not understand -f,
+ # so we also need to try rm && ln -s.
+ for linkname
+ do
+ test "$linkname" != "$realname" \
+ && func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
+ done
+ fi
+
+ # Do each command in the postinstall commands.
+ lib="$destdir/$realname"
+ func_execute_cmds "$postinstall_cmds" 'exit $?'
+ fi
+
+ # Install the pseudo-library for information purposes.
+ func_basename "$file"
+ name="$func_basename_result"
+ instname="$dir/$name"i
+ func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+ # Maybe install the static library, too.
+ test -n "$old_library" && func_append staticlibs " $dir/$old_library"
+ ;;
+
+ *.lo)
+ # Install (i.e. copy) a libtool object.
+
+ # Figure out destination file name, if it wasn't already specified.
+ if test -n "$destname"; then
+ destfile="$destdir/$destname"
+ else
+ func_basename "$file"
+ destfile="$func_basename_result"
+ destfile="$destdir/$destfile"
+ fi
+
+ # Deduce the name of the destination old-style object file.
+ case $destfile in
+ *.lo)
+ func_lo2o "$destfile"
+ staticdest=$func_lo2o_result
+ ;;
+ *.$objext)
+ staticdest="$destfile"
+ destfile=
+ ;;
+ *)
+ func_fatal_help "cannot copy a libtool object to \`$destfile'"
+ ;;
+ esac
+
+ # Install the libtool object if requested.
+ test -n "$destfile" && \
+ func_show_eval "$install_prog $file $destfile" 'exit $?'
+
+ # Install the old object if enabled.
+ if test "$build_old_libs" = yes; then
+ # Deduce the name of the old-style object file.
+ func_lo2o "$file"
+ staticobj=$func_lo2o_result
+ func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
+ fi
+ exit $EXIT_SUCCESS
+ ;;
+
+ *)
+ # Figure out destination file name, if it wasn't already specified.
+ if test -n "$destname"; then
+ destfile="$destdir/$destname"
+ else
+ func_basename "$file"
+ destfile="$func_basename_result"
+ destfile="$destdir/$destfile"
+ fi
+
+ # If the file is missing, and there is a .exe on the end, strip it
+ # because it is most likely a libtool script we actually want to
+ # install
+ stripped_ext=""
+ case $file in
+ *.exe)
+ if test ! -f "$file"; then
+ func_stripname '' '.exe' "$file"
+ file=$func_stripname_result
+ stripped_ext=".exe"
+ fi
+ ;;
+ esac
+
+ # Do a test to see if this is really a libtool program.
+ case $host in
+ *cygwin* | *mingw*)
+ if func_ltwrapper_executable_p "$file"; then
+ func_ltwrapper_scriptname "$file"
+ wrapper=$func_ltwrapper_scriptname_result
+ else
+ func_stripname '' '.exe' "$file"
+ wrapper=$func_stripname_result
+ fi
+ ;;
+ *)
+ wrapper=$file
+ ;;
+ esac
+ if func_ltwrapper_script_p "$wrapper"; then
+ notinst_deplibs=
+ relink_command=
+
+ func_source "$wrapper"
+
+ # Check the variables that should have been set.
+ test -z "$generated_by_libtool_version" && \
+ func_fatal_error "invalid libtool wrapper script \`$wrapper'"
+
+ finalize=yes
+ for lib in $notinst_deplibs; do
+ # Check to see that each library is installed.
+ libdir=
+ if test -f "$lib"; then
+ func_source "$lib"
+ fi
+ libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test
+ if test -n "$libdir" && test ! -f "$libfile"; then
+ func_warning "\`$lib' has not been installed in \`$libdir'"
+ finalize=no
+ fi
+ done
+
+ relink_command=
+ func_source "$wrapper"
+
+ outputname=
+ if test "$fast_install" = no && test -n "$relink_command"; then
+ $opt_dry_run || {
+ if test "$finalize" = yes; then
+ tmpdir=`func_mktempdir`
+ func_basename "$file$stripped_ext"
+ file="$func_basename_result"
+ outputname="$tmpdir/$file"
+ # Replace the output file specification.
+ relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
+
+ $opt_silent || {
+ func_quote_for_expand "$relink_command"
+ eval "func_echo $func_quote_for_expand_result"
+ }
+ if eval "$relink_command"; then :
+ else
+ func_error "error: relink \`$file' with the above command before installing it"
+ $opt_dry_run || ${RM}r "$tmpdir"
+ continue
+ fi
+ file="$outputname"
+ else
+ func_warning "cannot relink \`$file'"
+ fi
+ }
+ else
+ # Install the binary that we compiled earlier.
+ file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
+ fi
+ fi
+
+ # remove .exe since cygwin /usr/bin/install will append another
+ # one anyway
+ case $install_prog,$host in
+ */usr/bin/install*,*cygwin*)
+ case $file:$destfile in
+ *.exe:*.exe)
+ # this is ok
+ ;;
+ *.exe:*)
+ destfile=$destfile.exe
+ ;;
+ *:*.exe)
+ func_stripname '' '.exe' "$destfile"
+ destfile=$func_stripname_result
+ ;;
+ esac
+ ;;
+ esac
+ func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
+ $opt_dry_run || if test -n "$outputname"; then
+ ${RM}r "$tmpdir"
+ fi
+ ;;
+ esac
+ done
+
+ for file in $staticlibs; do
+ func_basename "$file"
+ name="$func_basename_result"
+
+ # Set up the ranlib parameters.
+ oldlib="$destdir/$name"
+
+ func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
+
+ if test -n "$stripme" && test -n "$old_striplib"; then
+ func_show_eval "$old_striplib $oldlib" 'exit $?'
+ fi
+
+ # Do each command in the postinstall commands.
+ func_execute_cmds "$old_postinstall_cmds" 'exit $?'
+ done
+
+ test -n "$future_libdirs" && \
+ func_warning "remember to run \`$progname --finish$future_libdirs'"
+
+ if test -n "$current_libdirs"; then
+ # Maybe just do a dry run.
+ $opt_dry_run && current_libdirs=" -n$current_libdirs"
+ exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
+ else
+ exit $EXIT_SUCCESS
+ fi
+}
+
+test "$opt_mode" = install && func_mode_install ${1+"$@"}
+
+
+# func_generate_dlsyms outputname originator pic_p
+# Extract symbols from dlprefiles and create ${outputname}S.o with
+# a dlpreopen symbol table.
+func_generate_dlsyms ()
+{
+ $opt_debug
+ my_outputname="$1"
+ my_originator="$2"
+ my_pic_p="${3-no}"
+ my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'`
+ my_dlsyms=
+
+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+ if test -n "$NM" && test -n "$global_symbol_pipe"; then
+ my_dlsyms="${my_outputname}S.c"
+ else
+ func_error "not configured to extract global symbols from dlpreopened files"
+ fi
+ fi
+
+ if test -n "$my_dlsyms"; then
+ case $my_dlsyms in
+ "") ;;
+ *.c)
+ # Discover the nlist of each of the dlfiles.
+ nlist="$output_objdir/${my_outputname}.nm"
+
+ func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
+
+ # Parse the name list into a source file.
+ func_verbose "creating $output_objdir/$my_dlsyms"
+
+ $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */
+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */
+
+#ifdef __cplusplus
+extern \"C\" {
+#endif
+
+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
+#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+#endif
+
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+ relocations are performed -- see ld's documentation on pseudo-relocs. */
+# define LT_DLSYM_CONST
+#elif defined(__osf__)
+/* This system does not cope well with relocations in const data. */
+# define LT_DLSYM_CONST
+#else
+# define LT_DLSYM_CONST const
+#endif
+
+/* External symbol declarations for the compiler. */\
+"
+
+ if test "$dlself" = yes; then
+ func_verbose "generating symbol list for \`$output'"
+
+ $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
+
+ # Add our own program objects to the symbol list.
+ progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ for progfile in $progfiles; do
+ func_to_tool_file "$progfile" func_convert_file_msys_to_w32
+ func_verbose "extracting global C symbols from \`$func_to_tool_file_result'"
+ $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
+ done
+
+ if test -n "$exclude_expsyms"; then
+ $opt_dry_run || {
+ eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
+ eval '$MV "$nlist"T "$nlist"'
+ }
+ fi
+
+ if test -n "$export_symbols_regex"; then
+ $opt_dry_run || {
+ eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
+ eval '$MV "$nlist"T "$nlist"'
+ }
+ fi
+
+ # Prepare the list of exported symbols
+ if test -z "$export_symbols"; then
+ export_symbols="$output_objdir/$outputname.exp"
+ $opt_dry_run || {
+ $RM $export_symbols
+ eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+ case $host in
+ *cygwin* | *mingw* | *cegcc* )
+ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+ eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+ ;;
+ esac
+ }
+ else
+ $opt_dry_run || {
+ eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
+ eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
+ eval '$MV "$nlist"T "$nlist"'
+ case $host in
+ *cygwin* | *mingw* | *cegcc* )
+ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+ eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+ ;;
+ esac
+ }
+ fi
+ fi
+
+ for dlprefile in $dlprefiles; do
+ func_verbose "extracting global C symbols from \`$dlprefile'"
+ func_basename "$dlprefile"
+ name="$func_basename_result"
+ case $host in
+ *cygwin* | *mingw* | *cegcc* )
+ # if an import library, we need to obtain dlname
+ if func_win32_import_lib_p "$dlprefile"; then
+ func_tr_sh "$dlprefile"
+ eval "curr_lafile=\$libfile_$func_tr_sh_result"
+ dlprefile_dlbasename=""
+ if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
+ # Use subshell, to avoid clobbering current variable values
+ dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
+ if test -n "$dlprefile_dlname" ; then
+ func_basename "$dlprefile_dlname"
+ dlprefile_dlbasename="$func_basename_result"
+ else
+ # no lafile. user explicitly requested -dlpreopen <import library>.
+ $sharedlib_from_linklib_cmd "$dlprefile"
+ dlprefile_dlbasename=$sharedlib_from_linklib_result
+ fi
+ fi
+ $opt_dry_run || {
+ if test -n "$dlprefile_dlbasename" ; then
+ eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
+ else
+ func_warning "Could not compute DLL name from $name"
+ eval '$ECHO ": $name " >> "$nlist"'
+ fi
+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
+ $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
+ }
+ else # not an import lib
+ $opt_dry_run || {
+ eval '$ECHO ": $name " >> "$nlist"'
+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+ }
+ fi
+ ;;
+ *)
+ $opt_dry_run || {
+ eval '$ECHO ": $name " >> "$nlist"'
+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+ }
+ ;;
+ esac
+ done
+
+ $opt_dry_run || {
+ # Make sure we have at least an empty file.
+ test -f "$nlist" || : > "$nlist"
+
+ if test -n "$exclude_expsyms"; then
+ $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+ $MV "$nlist"T "$nlist"
+ fi
+
+ # Try sorting and uniquifying the output.
+ if $GREP -v "^: " < "$nlist" |
+ if sort -k 3 </dev/null >/dev/null 2>&1; then
+ sort -k 3
+ else
+ sort +2
+ fi |
+ uniq > "$nlist"S; then
+ :
+ else
+ $GREP -v "^: " < "$nlist" > "$nlist"S
+ fi
+
+ if test -f "$nlist"S; then
+ eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
+ else
+ echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
+ fi
+
+ echo >> "$output_objdir/$my_dlsyms" "\
+
+/* The mapping between symbol names and symbols. */
+typedef struct {
+ const char *name;
+ void *address;
+} lt_dlsymlist;
+extern LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[];
+LT_DLSYM_CONST lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[] =
+{\
+ { \"$my_originator\", (void *) 0 },"
+
+ case $need_lib_prefix in
+ no)
+ eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
+ ;;
+ *)
+ eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
+ ;;
+ esac
+ echo >> "$output_objdir/$my_dlsyms" "\
+ {0, (void *) 0}
+};
+
+/* This works around a problem in FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+ return lt_${my_prefix}_LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif\
+"
+ } # !$opt_dry_run
+
+ pic_flag_for_symtable=
+ case "$compile_command " in
+ *" -static "*) ;;
+ *)
+ case $host in
+ # compiling the symbol table file with pic_flag works around
+ # a FreeBSD bug that causes programs to crash when -lm is
+ # linked before any other PIC object. But we must not use
+ # pic_flag when linking with -static. The problem exists in
+ # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
+ *-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
+ pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
+ *-*-hpux*)
+ pic_flag_for_symtable=" $pic_flag" ;;
+ *)
+ if test "X$my_pic_p" != Xno; then
+ pic_flag_for_symtable=" $pic_flag"
+ fi
+ ;;
+ esac
+ ;;
+ esac
+ symtab_cflags=
+ for arg in $LTCFLAGS; do
+ case $arg in
+ -pie | -fpie | -fPIE) ;;
+ *) func_append symtab_cflags " $arg" ;;
+ esac
+ done
+
+ # Now compile the dynamic symbol file.
+ func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
+
+ # Clean up the generated files.
+ func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"'
+
+ # Transform the symbol file into the correct name.
+ symfileobj="$output_objdir/${my_outputname}S.$objext"
+ case $host in
+ *cygwin* | *mingw* | *cegcc* )
+ if test -f "$output_objdir/$my_outputname.def"; then
+ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+ else
+ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+ fi
+ ;;
+ *)
+ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+ ;;
+ esac
+ ;;
+ *)
+ func_fatal_error "unknown suffix for \`$my_dlsyms'"
+ ;;
+ esac
+ else
+ # We keep going just in case the user didn't refer to
+ # lt_preloaded_symbols. The linker will fail if global_symbol_pipe
+ # really was required.
+
+ # Nullify the symbol file.
+ compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
+ finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
+ fi
+}
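+
+# Illustrative sketch (names are hypothetical): for an originator "foo" the
+# ${my_outputname}S.c generated above reduces to a preloaded-symbol table like
+#   lt_foo_LTX_preloaded_symbols[] = {
+#     { "foo", (void *) 0 },
+#     { "some_symbol", (void *) &some_symbol },  /* one entry per symbol */
+#     { 0, (void *) 0 }
+#   };
+# which dlpreopen-aware code (e.g. libltdl) consults in place of dlsym(3).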
+
+# func_win32_libid arg
+# return the library type of file 'arg'
+#
+# Need a lot of goo to handle *both* DLLs and import libs
+# Has to be a shell function in order to 'eat' the argument
+# that is supplied when $file_magic_command is called.
+# Despite the name, also deal with 64 bit binaries.
+func_win32_libid ()
+{
+ $opt_debug
+ win32_libid_type="unknown"
+ win32_fileres=`file -L $1 2>/dev/null`
+ case $win32_fileres in
+ *ar\ archive\ import\ library*) # definitely import
+ win32_libid_type="x86 archive import"
+ ;;
+ *ar\ archive*) # could be an import, or static
+ # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
+ if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
+ $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+ func_to_tool_file "$1" func_convert_file_msys_to_w32
+ win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
+ $SED -n -e '
+ 1,100{
+ / I /{
+ s,.*,import,
+ p
+ q
+ }
+ }'`
+ case $win32_nmres in
+ import*) win32_libid_type="x86 archive import";;
+ *) win32_libid_type="x86 archive static";;
+ esac
+ fi
+ ;;
+ *DLL*)
+ win32_libid_type="x86 DLL"
+ ;;
+ *executable*) # but shell scripts are "executable" too...
+ case $win32_fileres in
+ *MS\ Windows\ PE\ Intel*)
+ win32_libid_type="x86 DLL"
+ ;;
+ esac
+ ;;
+ esac
+ $ECHO "$win32_libid_type"
+}
+
+# func_cygming_dll_for_implib ARG
+#
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+# Invoked by eval'ing the libtool variable
+# $sharedlib_from_linklib_cmd
+# Result is available in the variable
+# $sharedlib_from_linklib_result
+func_cygming_dll_for_implib ()
+{
+ $opt_debug
+ sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
+}
+
+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
+#
+# This is the core of a fallback implementation of a
+# platform-specific function to extract the name of the
+# DLL associated with the specified import library LIBNAME.
+#
+# SECTION_NAME is either .idata$6 or .idata$7, depending
+# on the platform and compiler that created the implib.
+#
+# Echoes the name of the DLL associated with the
+# specified import library.
+func_cygming_dll_for_implib_fallback_core ()
+{
+ $opt_debug
+ match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
+ $OBJDUMP -s --section "$1" "$2" 2>/dev/null |
+ $SED '/^Contents of section '"$match_literal"':/{
+ # Place marker at beginning of archive member dllname section
+ s/.*/====MARK====/
+ p
+ d
+ }
+ # These lines can sometimes be longer than 43 characters, but
+ # are always uninteresting
+ /:[ ]*file format pe[i]\{,1\}-/d
+ /^In archive [^:]*:/d
+ # Ensure marker is printed
+ /^====MARK====/p
+ # Remove all lines with less than 43 characters
+ /^.\{43\}/!d
+ # From remaining lines, remove first 43 characters
+ s/^.\{43\}//' |
+ $SED -n '
+ # Join marker and all lines until next marker into a single line
+ /^====MARK====/ b para
+ H
+ $ b para
+ b
+ :para
+ x
+ s/\n//g
+ # Remove the marker
+ s/^====MARK====//
+ # Remove trailing dots and whitespace
+ s/[\. \t]*$//
+ # Print
+ /./p' |
+ # we now have a list, one entry per line, of the stringified
+ # contents of the appropriate section of all members of the
+ # archive which possess that section. Heuristic: eliminate
+ # all those which have a first or second character that is
+ # a '.' (that is, objdump's representation of an unprintable
+ # character.) This should work for all archives with less than
+ # 0x302f exports -- but will fail for DLLs whose name actually
+ # begins with a literal '.' or a single character followed by
+ # a '.'.
+ #
+ # Of those that remain, print the first one.
+ $SED -e '/^\./d;/^.\./d;q'
+}
+
+# func_cygming_gnu_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is a GNU/binutils-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_gnu_implib_p ()
+{
+ $opt_debug
+ func_to_tool_file "$1" func_convert_file_msys_to_w32
+ func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
+ test -n "$func_cygming_gnu_implib_tmp"
+}
+
+# func_cygming_ms_implib_p ARG
+# This predicate returns with zero status (TRUE) if
+# ARG is an MS-style import library. Returns
+# with nonzero status (FALSE) otherwise.
+func_cygming_ms_implib_p ()
+{
+ $opt_debug
+ func_to_tool_file "$1" func_convert_file_msys_to_w32
+ func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
+ test -n "$func_cygming_ms_implib_tmp"
+}
+
+# func_cygming_dll_for_implib_fallback ARG
+# Platform-specific function to extract the
+# name of the DLL associated with the specified
+# import library ARG.
+#
+# This fallback implementation is for use when $DLLTOOL
+# does not support the --identify-strict option.
+# Invoked by eval'ing the libtool variable
+# $sharedlib_from_linklib_cmd
+# Result is available in the variable
+# $sharedlib_from_linklib_result
+func_cygming_dll_for_implib_fallback ()
+{
+ $opt_debug
+ if func_cygming_gnu_implib_p "$1" ; then
+ # binutils import library
+ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"`
+ elif func_cygming_ms_implib_p "$1" ; then
+ # ms-generated import library
+ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"`
+ else
+ # unknown
+ sharedlib_from_linklib_result=""
+ fi
+}
+
+
+# func_extract_an_archive dir oldlib
+func_extract_an_archive ()
+{
+ $opt_debug
+ f_ex_an_ar_dir="$1"; shift
+ f_ex_an_ar_oldlib="$1"
+ if test "$lock_old_archive_extraction" = yes; then
+ lockfile=$f_ex_an_ar_oldlib.lock
+ until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+ func_echo "Waiting for $lockfile to be removed"
+ sleep 2
+ done
+ fi
+ func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
+ 'stat=$?; rm -f "$lockfile"; exit $stat'
+ if test "$lock_old_archive_extraction" = yes; then
+ $opt_dry_run || rm -f "$lockfile"
+ fi
+ if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
+ :
+ else
+ func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
+ fi
+}
+
+
+# func_extract_archives gentop oldlib ...
+func_extract_archives ()
+{
+ $opt_debug
+ my_gentop="$1"; shift
+ my_oldlibs=${1+"$@"}
+ my_oldobjs=""
+ my_xlib=""
+ my_xabs=""
+ my_xdir=""
+
+ for my_xlib in $my_oldlibs; do
+ # Extract the objects.
+ case $my_xlib in
+ [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
+ *) my_xabs=`pwd`"/$my_xlib" ;;
+ esac
+ func_basename "$my_xlib"
+ my_xlib="$func_basename_result"
+ my_xlib_u=$my_xlib
+ while :; do
+ case " $extracted_archives " in
+ *" $my_xlib_u "*)
+ func_arith $extracted_serial + 1
+ extracted_serial=$func_arith_result
+ my_xlib_u=lt$extracted_serial-$my_xlib ;;
+ *) break ;;
+ esac
+ done
+ extracted_archives="$extracted_archives $my_xlib_u"
+ my_xdir="$my_gentop/$my_xlib_u"
+
+ func_mkdir_p "$my_xdir"
+
+ case $host in
+ *-darwin*)
+ func_verbose "Extracting $my_xabs"
+ # Do not bother doing anything if just a dry run
+ $opt_dry_run || {
+ darwin_orig_dir=`pwd`
+ cd $my_xdir || exit $?
+ darwin_archive=$my_xabs
+ darwin_curdir=`pwd`
+ darwin_base_archive=`basename "$darwin_archive"`
+ darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
+ if test -n "$darwin_arches"; then
+ darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
+ darwin_arch=
+ func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
+ for darwin_arch in $darwin_arches ; do
+ func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+ $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
+ cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+ func_extract_an_archive "`pwd`" "${darwin_base_archive}"
+ cd "$darwin_curdir"
+ $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
+ done # $darwin_arches
+ ## Okay now we've a bunch of thin objects, gotta fatten them up :)
+ darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
+ darwin_file=
+ darwin_files=
+ for darwin_file in $darwin_filelist; do
+ darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
+ $LIPO -create -output "$darwin_file" $darwin_files
+ done # $darwin_filelist
+ $RM -rf unfat-$$
+ cd "$darwin_orig_dir"
+ else
+ cd $darwin_orig_dir
+ func_extract_an_archive "$my_xdir" "$my_xabs"
+ fi # $darwin_arches
+ } # !$opt_dry_run
+ ;;
+ *)
+ func_extract_an_archive "$my_xdir" "$my_xabs"
+ ;;
+ esac
+ my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
+ done
+
+ func_extract_archives_result="$my_oldobjs"
+}
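+
+# Illustrative usage (arguments are hypothetical): a caller would do roughly
+#   func_extract_archives "$gentop" libfoo.a libbar.a
+#   objs="$objs $func_extract_archives_result"
+# i.e. each archive is unpacked into its own subdirectory of $gentop and the
+# extracted object paths come back in func_extract_archives_result.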
+
+
+# func_emit_wrapper [arg=no]
+#
+# Emit a libtool wrapper script on stdout.
+# Don't directly open a file because we may want to
+# incorporate the script contents within a cygwin/mingw
+# wrapper executable. Must ONLY be called from within
+# func_mode_link because it depends on a number of variables
+# set therein.
+#
+# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
+# variable will take. If 'yes', then the emitted script
+# will assume that the directory in which it is stored is
+# the $objdir directory. This is a cygwin/mingw-specific
+# behavior.
+func_emit_wrapper ()
+{
+ func_emit_wrapper_arg1=${1-no}
+
+ $ECHO "\
+#! $SHELL
+
+# $output - temporary wrapper script for $objdir/$outputname
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# The $output program cannot be directly executed until all the libtool
+# libraries that it depends on are installed.
+#
+# This wrapper script should never be moved out of the build directory.
+# If it is, it will not operate correctly.
+
+# Sed substitution that helps us do robust quoting. It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='$sed_quote_subst'
+
+# Be Bourne compatible
+if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
+ emulate sh
+ NULLCMD=:
+ # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which
+ # is contrary to our usage. Disable this feature.
+ alias -g '\${1+\"\$@\"}'='\"\$@\"'
+ setopt NO_GLOB_SUBST
+else
+ case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+relink_command=\"$relink_command\"
+
+# This environment variable determines our operation mode.
+if test \"\$libtool_install_magic\" = \"$magic\"; then
+ # install mode needs the following variables:
+ generated_by_libtool_version='$macro_version'
+ notinst_deplibs='$notinst_deplibs'
+else
+ # When we are sourced in execute mode, \$file and \$ECHO are already set.
+ if test \"\$libtool_execute_magic\" != \"$magic\"; then
+ file=\"\$0\""
+
+ qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
+ $ECHO "\
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+ eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+ ECHO=\"$qECHO\"
+ fi
+
+# Very basic option parsing. These options are (a) specific to
+# the libtool wrapper, (b) are identical between the wrapper
+# /script/ and the wrapper /executable/ which is used only on
+# windows platforms, and (c) all begin with the string "--lt-"
+# (application programs are unlikely to have options which match
+# this pattern).
+#
+# There are only two supported options: --lt-debug and
+# --lt-dump-script. There is, deliberately, no --lt-help.
+#
+# The first argument to this parsing function should be the
+# script's $0 value, followed by "$@".
+lt_option_debug=
+func_parse_lt_options ()
+{
+ lt_script_arg0=\$0
+ shift
+ for lt_opt
+ do
+ case \"\$lt_opt\" in
+ --lt-debug) lt_option_debug=1 ;;
+ --lt-dump-script)
+ lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
+ test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
+ lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
+ cat \"\$lt_dump_D/\$lt_dump_F\"
+ exit 0
+ ;;
+ --lt-*)
+ \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
+ exit 1
+ ;;
+ esac
+ done
+
+ # Print the debug banner immediately:
+ if test -n \"\$lt_option_debug\"; then
+ echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
+ fi
+}
+
+# Used when --lt-debug. Prints its arguments to stdout
+# (redirection is the responsibility of the caller)
+func_lt_dump_args ()
+{
+ lt_dump_args_N=1;
+ for lt_arg
+ do
+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
+ lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
+ done
+}
+
+# Core function for launching the target application
+func_exec_program_core ()
+{
+"
+ case $host in
+ # Backslashes separate directories on plain windows
+ *-*-mingw | *-*-os2* | *-cegcc*)
+ $ECHO "\
+ if test -n \"\$lt_option_debug\"; then
+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
+ func_lt_dump_args \${1+\"\$@\"} 1>&2
+ fi
+ exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+"
+ ;;
+
+ *)
+ $ECHO "\
+ if test -n \"\$lt_option_debug\"; then
+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
+ func_lt_dump_args \${1+\"\$@\"} 1>&2
+ fi
+ exec \"\$progdir/\$program\" \${1+\"\$@\"}
+"
+ ;;
+ esac
+ $ECHO "\
+ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+ exit 1
+}
+
+# A function to encapsulate launching the target application
+# Strips options in the --lt-* namespace from \$@ and
+# launches target application with the remaining arguments.
+func_exec_program ()
+{
+ for lt_wr_arg
+ do
+ case \$lt_wr_arg in
+ --lt-*) ;;
+ *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
+ esac
+ shift
+ done
+ func_exec_program_core \${1+\"\$@\"}
+}
+
+ # Parse options
+ func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+ # Find the directory that this script lives in.
+ thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+ test \"x\$thisdir\" = \"x\$file\" && thisdir=.
+
+ # Follow symbolic links until we get to the real thisdir.
+ file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
+ while test -n \"\$file\"; do
+ destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
+
+ # If there was a directory component, then change thisdir.
+ if test \"x\$destdir\" != \"x\$file\"; then
+ case \"\$destdir\" in
+ [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
+ *) thisdir=\"\$thisdir/\$destdir\" ;;
+ esac
+ fi
+
+ file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
+ file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
+ done
+
+ # Usually 'no', except on cygwin/mingw when embedded into
+ # the cwrapper.
+ WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
+ if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
+ # special case for '.'
+ if test \"\$thisdir\" = \".\"; then
+ thisdir=\`pwd\`
+ fi
+ # remove .libs from thisdir
+ case \"\$thisdir\" in
+ *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
+ $objdir ) thisdir=. ;;
+ esac
+ fi
+
+ # Try to get the absolute directory name.
+ absdir=\`cd \"\$thisdir\" && pwd\`
+ test -n \"\$absdir\" && thisdir=\"\$absdir\"
+"
+
+ if test "$fast_install" = yes; then
+ $ECHO "\
+ program=lt-'$outputname'$exeext
+ progdir=\"\$thisdir/$objdir\"
+
+ if test ! -f \"\$progdir/\$program\" ||
+ { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
+ test \"X\$file\" != \"X\$progdir/\$program\"; }; then
+
+ file=\"\$\$-\$program\"
+
+ if test ! -d \"\$progdir\"; then
+ $MKDIR \"\$progdir\"
+ else
+ $RM \"\$progdir/\$file\"
+ fi"
+
+ $ECHO "\
+
+ # relink executable if necessary
+ if test -n \"\$relink_command\"; then
+ if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+ else
+ $ECHO \"\$relink_command_output\" >&2
+ $RM \"\$progdir/\$file\"
+ exit 1
+ fi
+ fi
+
+ $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
+ { $RM \"\$progdir/\$program\";
+ $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
+ $RM \"\$progdir/\$file\"
+ fi"
+ else
+ $ECHO "\
+ program='$outputname'
+ progdir=\"\$thisdir/$objdir\"
+"
+ fi
+
+ $ECHO "\
+
+ if test -f \"\$progdir/\$program\"; then"
+
+ # Fix up the DLL search path if we need to. Do this before prepending
+ # to shlibpath, because on Windows, both are PATH and uninstalled
+ # libraries must come first.
+ if test -n "$dllsearchpath"; then
+ $ECHO "\
+ # Add the dll search path components to the executable PATH
+ PATH=$dllsearchpath:\$PATH
+"
+ fi
+
+ # Export our shlibpath_var if we have one.
+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+ $ECHO "\
+ # Add our own library path to $shlibpath_var
+ $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
+
+ # Some systems cannot cope with colon-terminated $shlibpath_var
+ # The second colon is a workaround for a bug in BeOS R4 sed
+ $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
+
+ export $shlibpath_var
+"
+ fi
+
+ $ECHO "\
+ if test \"\$libtool_execute_magic\" != \"$magic\"; then
+ # Run the actual program with our arguments.
+ func_exec_program \${1+\"\$@\"}
+ fi
+ else
+ # The program doesn't exist.
+ \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
+ \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
+ \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
+ exit 1
+ fi
+fi\
+"
+}
+
+
+# func_emit_cwrapperexe_src
+# emit the source code for a wrapper executable on stdout
+# Must ONLY be called from within func_mode_link because
+# it depends on a number of variables set therein.
+func_emit_cwrapperexe_src ()
+{
+ cat <<EOF
+
+/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
+ Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+
+ The $output program cannot be directly executed until all the libtool
+ libraries that it depends on are installed.
+
+ This wrapper executable should never be moved out of the build directory.
+ If it is, it will not operate correctly.
+*/
+EOF
+ cat <<"EOF"
+#ifdef _MSC_VER
+# define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef _MSC_VER
+# include <direct.h>
+# include <process.h>
+# include <io.h>
+#else
+# include <unistd.h>
+# include <stdint.h>
+# ifdef __CYGWIN__
+# include <io.h>
+# endif
+#endif
+#include <malloc.h>
+#include <stdarg.h>
+#include <assert.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+
+/* declarations of non-ANSI functions */
+#if defined(__MINGW32__)
+# ifdef __STRICT_ANSI__
+int _putenv (const char *);
+# endif
+#elif defined(__CYGWIN__)
+# ifdef __STRICT_ANSI__
+char *realpath (const char *, char *);
+int putenv (char *);
+int setenv (const char *, const char *, int);
+# endif
+/* #elif defined (other platforms) ... */
+#endif
+
+/* portability defines, excluding path handling macros */
+#if defined(_MSC_VER)
+# define setmode _setmode
+# define stat _stat
+# define chmod _chmod
+# define getcwd _getcwd
+# define putenv _putenv
+# define S_IXUSR _S_IEXEC
+# ifndef _INTPTR_T_DEFINED
+# define _INTPTR_T_DEFINED
+# define intptr_t int
+# endif
+#elif defined(__MINGW32__)
+# define setmode _setmode
+# define stat _stat
+# define chmod _chmod
+# define getcwd _getcwd
+# define putenv _putenv
+#elif defined(__CYGWIN__)
+# define HAVE_SETENV
+# define FOPEN_WB "wb"
+/* #elif defined (other platforms) ... */
+#endif
+
+#if defined(PATH_MAX)
+# define LT_PATHMAX PATH_MAX
+#elif defined(MAXPATHLEN)
+# define LT_PATHMAX MAXPATHLEN
+#else
+# define LT_PATHMAX 1024
+#endif
+
+#ifndef S_IXOTH
+# define S_IXOTH 0
+#endif
+#ifndef S_IXGRP
+# define S_IXGRP 0
+#endif
+
+/* path handling portability macros */
+#ifndef DIR_SEPARATOR
+# define DIR_SEPARATOR '/'
+# define PATH_SEPARATOR ':'
+#endif
+
+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
+ defined (__OS2__)
+# define HAVE_DOS_BASED_FILE_SYSTEM
+# define FOPEN_WB "wb"
+# ifndef DIR_SEPARATOR_2
+# define DIR_SEPARATOR_2 '\\'
+# endif
+# ifndef PATH_SEPARATOR_2
+# define PATH_SEPARATOR_2 ';'
+# endif
+#endif
+
+#ifndef DIR_SEPARATOR_2
+# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
+#else /* DIR_SEPARATOR_2 */
+# define IS_DIR_SEPARATOR(ch) \
+ (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
+#endif /* DIR_SEPARATOR_2 */
+
+#ifndef PATH_SEPARATOR_2
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
+#else /* PATH_SEPARATOR_2 */
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
+#endif /* PATH_SEPARATOR_2 */
+
+#ifndef FOPEN_WB
+# define FOPEN_WB "w"
+#endif
+#ifndef _O_BINARY
+# define _O_BINARY 0
+#endif
+
+#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type)))
+#define XFREE(stale) do { \
+ if (stale) { free ((void *) stale); stale = 0; } \
+} while (0)
+
+#if defined(LT_DEBUGWRAPPER)
+static int lt_debug = 1;
+#else
+static int lt_debug = 0;
+#endif
+
+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+void *xmalloc (size_t num);
+char *xstrdup (const char *string);
+const char *base_name (const char *name);
+char *find_executable (const char *wrapper);
+char *chase_symlinks (const char *pathspec);
+int make_executable (const char *path);
+int check_executable (const char *path);
+char *strendzap (char *str, const char *pat);
+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
+void lt_fatal (const char *file, int line, const char *message, ...);
+static const char *nonnull (const char *s);
+static const char *nonempty (const char *s);
+void lt_setenv (const char *name, const char *value);
+char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+void lt_update_exe_path (const char *name, const char *value);
+void lt_update_lib_path (const char *name, const char *value);
+char **prepare_spawn (char **argv);
+void lt_dump_script (FILE *f);
+EOF
+
+ cat <<EOF
+volatile const char * MAGIC_EXE = "$magic_exe";
+const char * LIB_PATH_VARNAME = "$shlibpath_var";
+EOF
+
+ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+ func_to_host_path "$temp_rpath"
+ cat <<EOF
+const char * LIB_PATH_VALUE = "$func_to_host_path_result";
+EOF
+ else
+ cat <<"EOF"
+const char * LIB_PATH_VALUE = "";
+EOF
+ fi
+
+ if test -n "$dllsearchpath"; then
+ func_to_host_path "$dllsearchpath:"
+ cat <<EOF
+const char * EXE_PATH_VARNAME = "PATH";
+const char * EXE_PATH_VALUE = "$func_to_host_path_result";
+EOF
+ else
+ cat <<"EOF"
+const char * EXE_PATH_VARNAME = "";
+const char * EXE_PATH_VALUE = "";
+EOF
+ fi
+
+ if test "$fast_install" = yes; then
+ cat <<EOF
+const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
+EOF
+ else
+ cat <<EOF
+const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
+EOF
+ fi
+
+
+ cat <<"EOF"
+
+#define LTWRAPPER_OPTION_PREFIX "--lt-"
+
+static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+static const char *dumpscript_opt = LTWRAPPER_OPTION_PREFIX "dump-script";
+static const char *debug_opt = LTWRAPPER_OPTION_PREFIX "debug";
+
+int
+main (int argc, char *argv[])
+{
+ char **newargz;
+ int newargc;
+ char *tmp_pathspec;
+ char *actual_cwrapper_path;
+ char *actual_cwrapper_name;
+ char *target_name;
+ char *lt_argv_zero;
+ intptr_t rval = 127;
+
+ int i;
+
+ program_name = (char *) xstrdup (base_name (argv[0]));
+ newargz = XMALLOC (char *, argc + 1);
+
+ /* very simple arg parsing; don't want to rely on getopt
+ * also, copy all non cwrapper options to newargz, except
+ * argz[0], which is handled differently
+ */
+ newargc=0;
+ for (i = 1; i < argc; i++)
+ {
+ if (strcmp (argv[i], dumpscript_opt) == 0)
+ {
+EOF
+ case "$host" in
+ *mingw* | *cygwin* )
+ # make stdout use "unix" line endings
+ echo " setmode(1,_O_BINARY);"
+ ;;
+ esac
+
+ cat <<"EOF"
+ lt_dump_script (stdout);
+ return 0;
+ }
+ if (strcmp (argv[i], debug_opt) == 0)
+ {
+ lt_debug = 1;
+ continue;
+ }
+ if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
+ {
+ /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+ namespace, but it is not one of the ones we know about and
+ have already dealt with above (including dump-script), then
+ report an error. Otherwise, targets might begin to believe
+ they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+ namespace. The first time any user complains about this, we'll
+ need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+ or a configure.ac-settable value.
+ */
+ lt_fatal (__FILE__, __LINE__,
+ "unrecognized %s option: '%s'",
+ ltwrapper_option_prefix, argv[i]);
+ }
+ /* otherwise ... */
+ newargz[++newargc] = xstrdup (argv[i]);
+ }
+ newargz[++newargc] = NULL;
+
+EOF
+ cat <<EOF
+ /* The GNU banner must be the first non-error debug message */
+ lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
+EOF
+ cat <<"EOF"
+ lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
+ lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+ tmp_pathspec = find_executable (argv[0]);
+ if (tmp_pathspec == NULL)
+ lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
+ lt_debugprintf (__FILE__, __LINE__,
+ "(main) found exe (before symlink chase) at: %s\n",
+ tmp_pathspec);
+
+ actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+ lt_debugprintf (__FILE__, __LINE__,
+ "(main) found exe (after symlink chase) at: %s\n",
+ actual_cwrapper_path);
+ XFREE (tmp_pathspec);
+
+ actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+ strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+ /* wrapper name transforms */
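+  /* (strip any trailing ".exe", then re-append it, so the name below
+     always carries the suffix) */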
+ strendzap (actual_cwrapper_name, ".exe");
+ tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
+ XFREE (actual_cwrapper_name);
+ actual_cwrapper_name = tmp_pathspec;
+ tmp_pathspec = 0;
+
+ /* target_name transforms -- use actual target program name; might have lt- prefix */
+ target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
+ strendzap (target_name, ".exe");
+ tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
+ XFREE (target_name);
+ target_name = tmp_pathspec;
+ tmp_pathspec = 0;
+
+ lt_debugprintf (__FILE__, __LINE__,
+ "(main) libtool target name: %s\n",
+ target_name);
+EOF
+
+ cat <<EOF
+ newargz[0] =
+ XMALLOC (char, (strlen (actual_cwrapper_path) +
+ strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
+ strcpy (newargz[0], actual_cwrapper_path);
+ strcat (newargz[0], "$objdir");
+ strcat (newargz[0], "/");
+EOF
+
+ cat <<"EOF"
+ /* stop here, and copy so we don't have to do this twice */
+ tmp_pathspec = xstrdup (newargz[0]);
+
+ /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
+ strcat (newargz[0], actual_cwrapper_name);
+
+ /* DO want the lt- prefix here if it exists, so use target_name */
+ lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
+ XFREE (tmp_pathspec);
+ tmp_pathspec = NULL;
+EOF
+
+ case $host_os in
+ mingw*)
+ cat <<"EOF"
+ {
+ char* p;
+ while ((p = strchr (newargz[0], '\\')) != NULL)
+ {
+ *p = '/';
+ }
+ while ((p = strchr (lt_argv_zero, '\\')) != NULL)
+ {
+ *p = '/';
+ }
+ }
+EOF
+ ;;
+ esac
+
+ cat <<"EOF"
+ XFREE (target_name);
+ XFREE (actual_cwrapper_path);
+ XFREE (actual_cwrapper_name);
+
+ lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+  lt_setenv ("DUALCASE", "1"); /* for MKS sh */
+ /* Update the DLL searchpath. EXE_PATH_VALUE ($dllsearchpath) must
+ be prepended before (that is, appear after) LIB_PATH_VALUE ($temp_rpath)
+ because on Windows, both *_VARNAMEs are PATH but uninstalled
+ libraries must come first. */
+ lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
+ lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+
+ lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
+ nonnull (lt_argv_zero));
+ for (i = 0; i < newargc; i++)
+ {
+ lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
+ i, nonnull (newargz[i]));
+ }
+
+EOF
+
+ case $host_os in
+ mingw*)
+ cat <<"EOF"
+ /* execv doesn't actually work on mingw as expected on unix */
+ newargz = prepare_spawn (newargz);
+ rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
+ if (rval == -1)
+ {
+ /* failed to start process */
+ lt_debugprintf (__FILE__, __LINE__,
+ "(main) failed to launch target \"%s\": %s\n",
+ lt_argv_zero, nonnull (strerror (errno)));
+ return 127;
+ }
+ return rval;
+EOF
+ ;;
+ *)
+ cat <<"EOF"
+ execv (lt_argv_zero, newargz);
+ return rval; /* =127, but avoids unused variable warning */
+EOF
+ ;;
+ esac
+
+ cat <<"EOF"
+}
+
+void *
+xmalloc (size_t num)
+{
+ void *p = (void *) malloc (num);
+ if (!p)
+ lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+ return p;
+}
+
+char *
+xstrdup (const char *string)
+{
+ return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
+ string) : NULL;
+}
+
+const char *
+base_name (const char *name)
+{
+ const char *base;
+
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+ /* Skip over the disk name in MSDOS pathnames. */
+ if (isalpha ((unsigned char) name[0]) && name[1] == ':')
+ name += 2;
+#endif
+
+ for (base = name; *name; name++)
+ if (IS_DIR_SEPARATOR (*name))
+ base = name + 1;
+ return base;
+}
+
+int
+check_executable (const char *path)
+{
+ struct stat st;
+
+ lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
+ nonempty (path));
+ if ((!path) || (!*path))
+ return 0;
+
+ if ((stat (path, &st) >= 0)
+ && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
+ return 1;
+ else
+ return 0;
+}
+
+int
+make_executable (const char *path)
+{
+ int rval = 0;
+ struct stat st;
+
+ lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
+ nonempty (path));
+ if ((!path) || (!*path))
+ return 0;
+
+ if (stat (path, &st) >= 0)
+ {
+ rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
+ }
+ return rval;
+}
+
+/* Searches for the full path of the wrapper. Returns
+   newly allocated full path name if found, NULL otherwise.
+ Does not chase symlinks, even on platforms that support them.
+*/
+char *
+find_executable (const char *wrapper)
+{
+ int has_slash = 0;
+ const char *p;
+ const char *p_next;
+ /* static buffer for getcwd */
+ char tmp[LT_PATHMAX + 1];
+ int tmp_len;
+ char *concat_name;
+
+ lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
+ nonempty (wrapper));
+
+ if ((wrapper == NULL) || (*wrapper == '\0'))
+ return NULL;
+
+ /* Absolute path? */
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+ if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
+ {
+ concat_name = xstrdup (wrapper);
+ if (check_executable (concat_name))
+ return concat_name;
+ XFREE (concat_name);
+ }
+ else
+ {
+#endif
+ if (IS_DIR_SEPARATOR (wrapper[0]))
+ {
+ concat_name = xstrdup (wrapper);
+ if (check_executable (concat_name))
+ return concat_name;
+ XFREE (concat_name);
+ }
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+ }
+#endif
+
+ for (p = wrapper; *p; p++)
+ if (*p == '/')
+ {
+ has_slash = 1;
+ break;
+ }
+ if (!has_slash)
+ {
+ /* no slashes; search PATH */
+ const char *path = getenv ("PATH");
+ if (path != NULL)
+ {
+ for (p = path; *p; p = p_next)
+ {
+ const char *q;
+ size_t p_len;
+ for (q = p; *q; q++)
+ if (IS_PATH_SEPARATOR (*q))
+ break;
+ p_len = q - p;
+ p_next = (*q == '\0' ? q : q + 1);
+ if (p_len == 0)
+ {
+ /* empty path: current directory */
+ if (getcwd (tmp, LT_PATHMAX) == NULL)
+ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+ nonnull (strerror (errno)));
+ tmp_len = strlen (tmp);
+ concat_name =
+ XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+ memcpy (concat_name, tmp, tmp_len);
+ concat_name[tmp_len] = '/';
+ strcpy (concat_name + tmp_len + 1, wrapper);
+ }
+ else
+ {
+ concat_name =
+ XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
+ memcpy (concat_name, p, p_len);
+ concat_name[p_len] = '/';
+ strcpy (concat_name + p_len + 1, wrapper);
+ }
+ if (check_executable (concat_name))
+ return concat_name;
+ XFREE (concat_name);
+ }
+ }
+ /* not found in PATH; assume curdir */
+ }
+ /* Relative path | not found in path: prepend cwd */
+ if (getcwd (tmp, LT_PATHMAX) == NULL)
+ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+ nonnull (strerror (errno)));
+ tmp_len = strlen (tmp);
+ concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+ memcpy (concat_name, tmp, tmp_len);
+ concat_name[tmp_len] = '/';
+ strcpy (concat_name + tmp_len + 1, wrapper);
+
+ if (check_executable (concat_name))
+ return concat_name;
+ XFREE (concat_name);
+ return NULL;
+}
+
+char *
+chase_symlinks (const char *pathspec)
+{
+#ifndef S_ISLNK
+ return xstrdup (pathspec);
+#else
+ char buf[LT_PATHMAX];
+ struct stat s;
+ char *tmp_pathspec = xstrdup (pathspec);
+ char *p;
+ int has_symlinks = 0;
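+  /* Walk from the full path toward the root, lstat()ing each prefix;
+     if any component is a symlink, fall back to realpath() below.  */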
+ while (strlen (tmp_pathspec) && !has_symlinks)
+ {
+ lt_debugprintf (__FILE__, __LINE__,
+ "checking path component for symlinks: %s\n",
+ tmp_pathspec);
+ if (lstat (tmp_pathspec, &s) == 0)
+ {
+ if (S_ISLNK (s.st_mode) != 0)
+ {
+ has_symlinks = 1;
+ break;
+ }
+
+ /* search backwards for last DIR_SEPARATOR */
+ p = tmp_pathspec + strlen (tmp_pathspec) - 1;
+ while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+ p--;
+ if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+ {
+ /* no more DIR_SEPARATORS left */
+ break;
+ }
+ *p = '\0';
+ }
+ else
+ {
+ lt_fatal (__FILE__, __LINE__,
+ "error accessing file \"%s\": %s",
+ tmp_pathspec, nonnull (strerror (errno)));
+ }
+ }
+ XFREE (tmp_pathspec);
+
+ if (!has_symlinks)
+ {
+ return xstrdup (pathspec);
+ }
+
+ tmp_pathspec = realpath (pathspec, buf);
+ if (tmp_pathspec == 0)
+ {
+ lt_fatal (__FILE__, __LINE__,
+ "could not follow symlinks for %s", pathspec);
+ }
+ return xstrdup (tmp_pathspec);
+#endif
+}
+
+char *
+strendzap (char *str, const char *pat)
+{
+ size_t len, patlen;
+
+ assert (str != NULL);
+ assert (pat != NULL);
+
+ len = strlen (str);
+ patlen = strlen (pat);
+
+ if (patlen <= len)
+ {
+ str += len - patlen;
+ if (strcmp (str, pat) == 0)
+ *str = '\0';
+ }
+ return str;
+}
+
+void
+lt_debugprintf (const char *file, int line, const char *fmt, ...)
+{
+ va_list args;
+ if (lt_debug)
+ {
+ (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
+ va_start (args, fmt);
+ (void) vfprintf (stderr, fmt, args);
+ va_end (args);
+ }
+}
+
+static void
+lt_error_core (int exit_status, const char *file,
+ int line, const char *mode,
+ const char *message, va_list ap)
+{
+ fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+ vfprintf (stderr, message, ap);
+ fprintf (stderr, ".\n");
+
+ if (exit_status >= 0)
+ exit (exit_status);
+}
+
+void
+lt_fatal (const char *file, int line, const char *message, ...)
+{
+ va_list ap;
+ va_start (ap, message);
+ lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+ va_end (ap);
+}
+
+static const char *
+nonnull (const char *s)
+{
+ return s ? s : "(null)";
+}
+
+static const char *
+nonempty (const char *s)
+{
+ return (s && !*s) ? "(empty)" : nonnull (s);
+}
+
+void
+lt_setenv (const char *name, const char *value)
+{
+ lt_debugprintf (__FILE__, __LINE__,
+ "(lt_setenv) setting '%s' to '%s'\n",
+ nonnull (name), nonnull (value));
+ {
+#ifdef HAVE_SETENV
+ /* always make a copy, for consistency with !HAVE_SETENV */
+ char *str = xstrdup (value);
+ setenv (name, str, 1);
+#else
+ int len = strlen (name) + 1 + strlen (value) + 1;
+ char *str = XMALLOC (char, len);
+ sprintf (str, "%s=%s", name, value);
+ if (putenv (str) != EXIT_SUCCESS)
+ {
+ XFREE (str);
+ }
+#endif
+ }
+}
+
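+/* Return a freshly allocated string: ORIG_VALUE with ADD appended
+   (TO_END nonzero) or prepended (TO_END zero).  An empty ORIG_VALUE
+   yields a plain copy of ADD.  */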
+char *
+lt_extend_str (const char *orig_value, const char *add, int to_end)
+{
+ char *new_value;
+ if (orig_value && *orig_value)
+ {
+ int orig_value_len = strlen (orig_value);
+ int add_len = strlen (add);
+ new_value = XMALLOC (char, add_len + orig_value_len + 1);
+ if (to_end)
+ {
+ strcpy (new_value, orig_value);
+ strcpy (new_value + orig_value_len, add);
+ }
+ else
+ {
+ strcpy (new_value, add);
+ strcpy (new_value + add_len, orig_value);
+ }
+ }
+ else
+ {
+ new_value = xstrdup (add);
+ }
+ return new_value;
+}
+
+void
+lt_update_exe_path (const char *name, const char *value)
+{
+ lt_debugprintf (__FILE__, __LINE__,
+ "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+ nonnull (name), nonnull (value));
+
+ if (name && *name && value && *value)
+ {
+ char *new_value = lt_extend_str (getenv (name), value, 0);
+ /* some systems can't cope with a ':'-terminated path #' */
+ int len = strlen (new_value);
+ while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
+ {
+ new_value[len-1] = '\0';
+ }
+ lt_setenv (name, new_value);
+ XFREE (new_value);
+ }
+}
+
+void
+lt_update_lib_path (const char *name, const char *value)
+{
+ lt_debugprintf (__FILE__, __LINE__,
+ "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+ nonnull (name), nonnull (value));
+
+ if (name && *name && value && *value)
+ {
+ char *new_value = lt_extend_str (getenv (name), value, 0);
+ lt_setenv (name, new_value);
+ XFREE (new_value);
+ }
+}
+
+EOF
+ case $host_os in
+ mingw*)
+ cat <<"EOF"
+
+/* Prepares an argument vector before calling spawn().
+ Note that spawn() does not by itself call the command interpreter
+ (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
+ ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+ GetVersionEx(&v);
+ v.dwPlatformId == VER_PLATFORM_WIN32_NT;
+ }) ? "cmd.exe" : "command.com").
+ Instead it simply concatenates the arguments, separated by ' ', and calls
+ CreateProcess(). We must quote the arguments since Win32 CreateProcess()
+ interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
+ special way:
+ - Space and tab are interpreted as delimiters. They are not treated as
+ delimiters if they are surrounded by double quotes: "...".
+ - Unescaped double quotes are removed from the input. Their only effect is
+ that within double quotes, space and tab are treated like normal
+ characters.
+ - Backslashes not followed by double quotes are not special.
+ - But 2*n+1 backslashes followed by a double quote become
+ n backslashes followed by a double quote (n >= 0):
+ \" -> "
+ \\\" -> \"
+ \\\\\" -> \\"
+ */
+#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+char **
+prepare_spawn (char **argv)
+{
+ size_t argc;
+ char **new_argv;
+ size_t i;
+
+ /* Count number of arguments. */
+ for (argc = 0; argv[argc] != NULL; argc++)
+ ;
+
+ /* Allocate new argument vector. */
+ new_argv = XMALLOC (char *, argc + 1);
+
+ /* Put quoted arguments into the new argument vector. */
+ for (i = 0; i < argc; i++)
+ {
+ const char *string = argv[i];
+
+ if (string[0] == '\0')
+ new_argv[i] = xstrdup ("\"\"");
+ else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
+ {
+ int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
+ size_t length;
+ unsigned int backslashes;
+ const char *s;
+ char *quoted_string;
+ char *p;
+
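+	  /* First pass: measure how long the quoted argument will be.  */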
+ length = 0;
+ backslashes = 0;
+ if (quote_around)
+ length++;
+ for (s = string; *s != '\0'; s++)
+ {
+ char c = *s;
+ if (c == '"')
+ length += backslashes + 1;
+ length++;
+ if (c == '\\')
+ backslashes++;
+ else
+ backslashes = 0;
+ }
+ if (quote_around)
+ length += backslashes + 1;
+
+ quoted_string = XMALLOC (char, length + 1);
+
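+	  /* Second pass: write the quoted argument, doubling each backslash
+	     that precedes a '"' and escaping the '"' itself.  */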
+ p = quoted_string;
+ backslashes = 0;
+ if (quote_around)
+ *p++ = '"';
+ for (s = string; *s != '\0'; s++)
+ {
+ char c = *s;
+ if (c == '"')
+ {
+ unsigned int j;
+ for (j = backslashes + 1; j > 0; j--)
+ *p++ = '\\';
+ }
+ *p++ = c;
+ if (c == '\\')
+ backslashes++;
+ else
+ backslashes = 0;
+ }
+ if (quote_around)
+ {
+ unsigned int j;
+ for (j = backslashes; j > 0; j--)
+ *p++ = '\\';
+ *p++ = '"';
+ }
+ *p = '\0';
+
+ new_argv[i] = quoted_string;
+ }
+ else
+ new_argv[i] = (char *) string;
+ }
+ new_argv[argc] = NULL;
+
+ return new_argv;
+}
+EOF
+ ;;
+ esac
+
+ cat <<"EOF"
+void lt_dump_script (FILE* f)
+{
+EOF
+ func_emit_wrapper yes |
+ $SED -e 's/\([\\"]\)/\\\1/g' \
+ -e 's/^/ fputs ("/' -e 's/$/\\n", f);/'
+
+ cat <<"EOF"
+}
+EOF
+}
+# end: func_emit_cwrapperexe_src
+
+# func_win32_import_lib_p ARG
+# True if ARG is an import lib, as indicated by $file_magic_cmd
+func_win32_import_lib_p ()
+{
+ $opt_debug
+ case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+ *import*) : ;;
+ *) false ;;
+ esac
+}
+
+# func_mode_link arg...
+func_mode_link ()
+{
+ $opt_debug
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+ # It is impossible to link a dll without this setting, and
+ # we shouldn't force the makefile maintainer to figure out
+ # which system we are compiling for in order to pass an extra
+ # flag for every libtool invocation.
+ # allow_undefined=no
+
+ # FIXME: Unfortunately, there are problems with the above when trying
+ # to make a dll which has undefined symbols, in which case not
+ # even a static library is built. For now, we need to specify
+ # -no-undefined on the libtool link line when we can be certain
+ # that all symbols are satisfied, otherwise we get a static library.
+ allow_undefined=yes
+ ;;
+ *)
+ allow_undefined=yes
+ ;;
+ esac
+ libtool_args=$nonopt
+ base_compile="$nonopt $@"
+ compile_command=$nonopt
+ finalize_command=$nonopt
+
+ compile_rpath=
+ finalize_rpath=
+ compile_shlibpath=
+ finalize_shlibpath=
+ convenience=
+ old_convenience=
+ deplibs=
+ old_deplibs=
+ compiler_flags=
+ linker_flags=
+ dllsearchpath=
+ lib_search_path=`pwd`
+ inst_prefix_dir=
+ new_inherited_linker_flags=
+
+ avoid_version=no
+ bindir=
+ dlfiles=
+ dlprefiles=
+ dlself=no
+ export_dynamic=no
+ export_symbols=
+ export_symbols_regex=
+ generated=
+ libobjs=
+ ltlibs=
+ module=no
+ no_install=no
+ objs=
+ non_pic_objects=
+ precious_files_regex=
+ prefer_static_libs=no
+ preload=no
+ prev=
+ prevarg=
+ release=
+ rpath=
+ xrpath=
+ perm_rpath=
+ temp_rpath=
+ thread_safe=no
+ vinfo=
+ vinfo_number=no
+ weak_libs=
+ single_module="${wl}-single_module"
+ func_infer_tag $base_compile
+
+ # We need to know -static, to get the right output filenames.
+ for arg
+ do
+ case $arg in
+ -shared)
+ test "$build_libtool_libs" != yes && \
+ func_fatal_configuration "can not build a shared library"
+ build_old_libs=no
+ break
+ ;;
+ -all-static | -static | -static-libtool-libs)
+ case $arg in
+ -all-static)
+ if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
+ func_warning "complete static linking is impossible in this configuration"
+ fi
+ if test -n "$link_static_flag"; then
+ dlopen_self=$dlopen_self_static
+ fi
+ prefer_static_libs=yes
+ ;;
+ -static)
+ if test -z "$pic_flag" && test -n "$link_static_flag"; then
+ dlopen_self=$dlopen_self_static
+ fi
+ prefer_static_libs=built
+ ;;
+ -static-libtool-libs)
+ if test -z "$pic_flag" && test -n "$link_static_flag"; then
+ dlopen_self=$dlopen_self_static
+ fi
+ prefer_static_libs=yes
+ ;;
+ esac
+ build_libtool_libs=no
+ build_old_libs=yes
+ break
+ ;;
+ esac
+ done
+
+ # See if our shared archives depend on static archives.
+ test -n "$old_archive_from_new_cmds" && build_old_libs=yes
+
+ # Go through the arguments, transforming them on the way.
+ while test "$#" -gt 0; do
+ arg="$1"
+ shift
+ func_quote_for_eval "$arg"
+ qarg=$func_quote_for_eval_unquoted_result
+ func_append libtool_args " $func_quote_for_eval_result"
+
+ # If the previous option needs an argument, assign it.
+ if test -n "$prev"; then
+ case $prev in
+ output)
+ func_append compile_command " @OUTPUT@"
+ func_append finalize_command " @OUTPUT@"
+ ;;
+ esac
+
+ case $prev in
+ bindir)
+ bindir="$arg"
+ prev=
+ continue
+ ;;
+ dlfiles|dlprefiles)
+ if test "$preload" = no; then
+ # Add the symbol object into the linking commands.
+ func_append compile_command " @SYMFILE@"
+ func_append finalize_command " @SYMFILE@"
+ preload=yes
+ fi
+ case $arg in
+ *.la | *.lo) ;; # We handle these cases below.
+ force)
+ if test "$dlself" = no; then
+ dlself=needless
+ export_dynamic=yes
+ fi
+ prev=
+ continue
+ ;;
+ self)
+ if test "$prev" = dlprefiles; then
+ dlself=yes
+ elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
+ dlself=yes
+ else
+ dlself=needless
+ export_dynamic=yes
+ fi
+ prev=
+ continue
+ ;;
+ *)
+ if test "$prev" = dlfiles; then
+ func_append dlfiles " $arg"
+ else
+ func_append dlprefiles " $arg"
+ fi
+ prev=
+ continue
+ ;;
+ esac
+ ;;
+ expsyms)
+ export_symbols="$arg"
+ test -f "$arg" \
+ || func_fatal_error "symbol file \`$arg' does not exist"
+ prev=
+ continue
+ ;;
+ expsyms_regex)
+ export_symbols_regex="$arg"
+ prev=
+ continue
+ ;;
+ framework)
+ case $host in
+ *-*-darwin*)
+ case "$deplibs " in
+ *" $qarg.ltframework "*) ;;
+ *) func_append deplibs " $qarg.ltframework" # this is fixed later
+ ;;
+ esac
+ ;;
+ esac
+ prev=
+ continue
+ ;;
+ inst_prefix)
+ inst_prefix_dir="$arg"
+ prev=
+ continue
+ ;;
+ objectlist)
+ if test -f "$arg"; then
+ save_arg=$arg
+ moreargs=
+ for fil in `cat "$save_arg"`
+ do
+# func_append moreargs " $fil"
+ arg=$fil
+ # A libtool-controlled object.
+
+ # Check to see that this really is a libtool object.
+ if func_lalib_unsafe_p "$arg"; then
+ pic_object=
+ non_pic_object=
+
+ # Read the .lo file
+ func_source "$arg"
+
+ if test -z "$pic_object" ||
+ test -z "$non_pic_object" ||
+ test "$pic_object" = none &&
+ test "$non_pic_object" = none; then
+ func_fatal_error "cannot find name of object for \`$arg'"
+ fi
+
+ # Extract subdirectory from the argument.
+ func_dirname "$arg" "/" ""
+ xdir="$func_dirname_result"
+
+ if test "$pic_object" != none; then
+ # Prepend the subdirectory the object is found in.
+ pic_object="$xdir$pic_object"
+
+ if test "$prev" = dlfiles; then
+ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+ func_append dlfiles " $pic_object"
+ prev=
+ continue
+ else
+ # If libtool objects are unsupported, then we need to preload.
+ prev=dlprefiles
+ fi
+ fi
+
+ # CHECK ME: I think I busted this. -Ossama
+ if test "$prev" = dlprefiles; then
+ # Preload the old-style object.
+ func_append dlprefiles " $pic_object"
+ prev=
+ fi
+
+ # A PIC object.
+ func_append libobjs " $pic_object"
+ arg="$pic_object"
+ fi
+
+ # Non-PIC object.
+ if test "$non_pic_object" != none; then
+ # Prepend the subdirectory the object is found in.
+ non_pic_object="$xdir$non_pic_object"
+
+ # A standard non-PIC object
+ func_append non_pic_objects " $non_pic_object"
+ if test -z "$pic_object" || test "$pic_object" = none ; then
+ arg="$non_pic_object"
+ fi
+ else
+ # If the PIC object exists, use it instead.
+ # $xdir was prepended to $pic_object above.
+ non_pic_object="$pic_object"
+ func_append non_pic_objects " $non_pic_object"
+ fi
+ else
+ # Only an error if not doing a dry-run.
+ if $opt_dry_run; then
+ # Extract subdirectory from the argument.
+ func_dirname "$arg" "/" ""
+ xdir="$func_dirname_result"
+
+ func_lo2o "$arg"
+ pic_object=$xdir$objdir/$func_lo2o_result
+ non_pic_object=$xdir$func_lo2o_result
+ func_append libobjs " $pic_object"
+ func_append non_pic_objects " $non_pic_object"
+ else
+ func_fatal_error "\`$arg' is not a valid libtool object"
+ fi
+ fi
+ done
+ else
+ func_fatal_error "link input file \`$arg' does not exist"
+ fi
+ arg=$save_arg
+ prev=
+ continue
+ ;;
+ precious_regex)
+ precious_files_regex="$arg"
+ prev=
+ continue
+ ;;
+ release)
+ release="-$arg"
+ prev=
+ continue
+ ;;
+ rpath | xrpath)
+ # We need an absolute path.
+ case $arg in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+ *)
+ func_fatal_error "only absolute run-paths are allowed"
+ ;;
+ esac
+ if test "$prev" = rpath; then
+ case "$rpath " in
+ *" $arg "*) ;;
+ *) func_append rpath " $arg" ;;
+ esac
+ else
+ case "$xrpath " in
+ *" $arg "*) ;;
+ *) func_append xrpath " $arg" ;;
+ esac
+ fi
+ prev=
+ continue
+ ;;
+ shrext)
+ shrext_cmds="$arg"
+ prev=
+ continue
+ ;;
+ weak)
+ func_append weak_libs " $arg"
+ prev=
+ continue
+ ;;
+ xcclinker)
+ func_append linker_flags " $qarg"
+ func_append compiler_flags " $qarg"
+ prev=
+ func_append compile_command " $qarg"
+ func_append finalize_command " $qarg"
+ continue
+ ;;
+ xcompiler)
+ func_append compiler_flags " $qarg"
+ prev=
+ func_append compile_command " $qarg"
+ func_append finalize_command " $qarg"
+ continue
+ ;;
+ xlinker)
+ func_append linker_flags " $qarg"
+ func_append compiler_flags " $wl$qarg"
+ prev=
+ func_append compile_command " $wl$qarg"
+ func_append finalize_command " $wl$qarg"
+ continue
+ ;;
+ *)
+ eval "$prev=\"\$arg\""
+ prev=
+ continue
+ ;;
+ esac
+ fi # test -n "$prev"
+
+ prevarg="$arg"
+
+ case $arg in
+ -all-static)
+ if test -n "$link_static_flag"; then
+ # See comment for -static flag below, for more details.
+ func_append compile_command " $link_static_flag"
+ func_append finalize_command " $link_static_flag"
+ fi
+ continue
+ ;;
+
+ -allow-undefined)
+ # FIXME: remove this flag sometime in the future.
+ func_fatal_error "\`-allow-undefined' must not be used because it is the default"
+ ;;
+
+ -avoid-version)
+ avoid_version=yes
+ continue
+ ;;
+
+ -bindir)
+ prev=bindir
+ continue
+ ;;
+
+ -dlopen)
+ prev=dlfiles
+ continue
+ ;;
+
+ -dlpreopen)
+ prev=dlprefiles
+ continue
+ ;;
+
+ -export-dynamic)
+ export_dynamic=yes
+ continue
+ ;;
+
+ -export-symbols | -export-symbols-regex)
+ if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
+	  func_fatal_error "more than one -export-symbols argument is not allowed"
+ fi
+ if test "X$arg" = "X-export-symbols"; then
+ prev=expsyms
+ else
+ prev=expsyms_regex
+ fi
+ continue
+ ;;
+
+ -framework)
+ prev=framework
+ continue
+ ;;
+
+ -inst-prefix-dir)
+ prev=inst_prefix
+ continue
+ ;;
+
+ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+ # so, if we see these flags be careful not to treat them like -L
+ -L[A-Z][A-Z]*:*)
+ case $with_gcc/$host in
+ no/*-*-irix* | /*-*-irix*)
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ ;;
+ esac
+ continue
+ ;;
+
+ -L*)
+ func_stripname "-L" '' "$arg"
+ if test -z "$func_stripname_result"; then
+ if test "$#" -gt 0; then
+	    func_fatal_error "no space is allowed between \`-L' and \`$1'"
+ else
+ func_fatal_error "need path for \`-L' option"
+ fi
+ fi
+ func_resolve_sysroot "$func_stripname_result"
+ dir=$func_resolve_sysroot_result
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+ *)
+ absdir=`cd "$dir" && pwd`
+ test -z "$absdir" && \
+ func_fatal_error "cannot determine absolute directory name of \`$dir'"
+ dir="$absdir"
+ ;;
+ esac
+ case "$deplibs " in
+ *" -L$dir "* | *" $arg "*)
+ # Will only happen for absolute or sysroot arguments
+ ;;
+ *)
+ # Preserve sysroot, but never include relative directories
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;;
+ *) func_append deplibs " -L$dir" ;;
+ esac
+ func_append lib_search_path " $dir"
+ ;;
+ esac
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
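+	# DLLs live alongside the executables, so also add the matching
+	# ".../bin" sibling of a ".../lib" -L directory to the DLL search path.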
+ testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
+ case :$dllsearchpath: in
+ *":$dir:"*) ;;
+ ::) dllsearchpath=$dir;;
+ *) func_append dllsearchpath ":$dir";;
+ esac
+ case :$dllsearchpath: in
+ *":$testbindir:"*) ;;
+ ::) dllsearchpath=$testbindir;;
+ *) func_append dllsearchpath ":$testbindir";;
+ esac
+ ;;
+ esac
+ continue
+ ;;
+
+ -l*)
+ if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
+ # These systems don't actually have a C or math library (as such)
+ continue
+ ;;
+ *-*-os2*)
+ # These systems don't actually have a C library (as such)
+ test "X$arg" = "X-lc" && continue
+ ;;
+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+ # Do not include libc due to us having libc/libc_r.
+ test "X$arg" = "X-lc" && continue
+ ;;
+ *-*-rhapsody* | *-*-darwin1.[012])
+ # Rhapsody C and math libraries are in the System framework
+ func_append deplibs " System.ltframework"
+ continue
+ ;;
+ *-*-sco3.2v5* | *-*-sco5v6*)
+ # Causes problems with __ctype
+ test "X$arg" = "X-lc" && continue
+ ;;
+ *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+ # Compiler inserts libc in the correct place for threads to work
+ test "X$arg" = "X-lc" && continue
+ ;;
+ esac
+ elif test "X$arg" = "X-lc_r"; then
+ case $host in
+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+ # Do not include libc_r directly, use -pthread flag.
+ continue
+ ;;
+ esac
+ fi
+ func_append deplibs " $arg"
+ continue
+ ;;
+
+ -module)
+ module=yes
+ continue
+ ;;
+
+ # Tru64 UNIX uses -model [arg] to determine the layout of C++
+ # classes, name mangling, and exception handling.
+ # Darwin uses the -arch flag to determine output architecture.
+ -model|-arch|-isysroot|--sysroot)
+ func_append compiler_flags " $arg"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ prev=xcompiler
+ continue
+ ;;
+
+ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
+ func_append compiler_flags " $arg"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ case "$new_inherited_linker_flags " in
+ *" $arg "*) ;;
+ * ) func_append new_inherited_linker_flags " $arg" ;;
+ esac
+ continue
+ ;;
+
+ -multi_module)
+ single_module="${wl}-multi_module"
+ continue
+ ;;
+
+ -no-fast-install)
+ fast_install=no
+ continue
+ ;;
+
+ -no-install)
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
+ # The PATH hackery in wrapper scripts is required on Windows
+ # and Darwin in order for the loader to find any dlls it needs.
+ func_warning "\`-no-install' is ignored for $host"
+ func_warning "assuming \`-no-fast-install' instead"
+ fast_install=no
+ ;;
+ *) no_install=yes ;;
+ esac
+ continue
+ ;;
+
+ -no-undefined)
+ allow_undefined=no
+ continue
+ ;;
+
+ -objectlist)
+ prev=objectlist
+ continue
+ ;;
+
+ -o) prev=output ;;
+
+ -precious-files-regex)
+ prev=precious_regex
+ continue
+ ;;
+
+ -release)
+ prev=release
+ continue
+ ;;
+
+ -rpath)
+ prev=rpath
+ continue
+ ;;
+
+ -R)
+ prev=xrpath
+ continue
+ ;;
+
+ -R*)
+ func_stripname '-R' '' "$arg"
+ dir=$func_stripname_result
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) ;;
+ =*)
+ func_stripname '=' '' "$dir"
+ dir=$lt_sysroot$func_stripname_result
+ ;;
+ *)
+ func_fatal_error "only absolute run-paths are allowed"
+ ;;
+ esac
+ case "$xrpath " in
+ *" $dir "*) ;;
+ *) func_append xrpath " $dir" ;;
+ esac
+ continue
+ ;;
+
+ -shared)
+ # The effects of -shared are defined in a previous loop.
+ continue
+ ;;
+
+ -shrext)
+ prev=shrext
+ continue
+ ;;
+
+ -static | -static-libtool-libs)
+ # The effects of -static are defined in a previous loop.
+ # We used to do the same as -all-static on platforms that
+ # didn't have a PIC flag, but the assumption that the effects
+ # would be equivalent was wrong. It would break on at least
+ # Digital Unix and AIX.
+ continue
+ ;;
+
+ -thread-safe)
+ thread_safe=yes
+ continue
+ ;;
+
+ -version-info)
+ prev=vinfo
+ continue
+ ;;
+
+ -version-number)
+ prev=vinfo
+ vinfo_number=yes
+ continue
+ ;;
+
+ -weak)
+ prev=weak
+ continue
+ ;;
+
+ -Wc,*)
+ func_stripname '-Wc,' '' "$arg"
+ args=$func_stripname_result
+ arg=
+ save_ifs="$IFS"; IFS=','
+ for flag in $args; do
+ IFS="$save_ifs"
+ func_quote_for_eval "$flag"
+ func_append arg " $func_quote_for_eval_result"
+ func_append compiler_flags " $func_quote_for_eval_result"
+ done
+ IFS="$save_ifs"
+ func_stripname ' ' '' "$arg"
+ arg=$func_stripname_result
+ ;;
+
+ -Wl,*)
+ func_stripname '-Wl,' '' "$arg"
+ args=$func_stripname_result
+ arg=
+ save_ifs="$IFS"; IFS=','
+ for flag in $args; do
+ IFS="$save_ifs"
+ func_quote_for_eval "$flag"
+ func_append arg " $wl$func_quote_for_eval_result"
+ func_append compiler_flags " $wl$func_quote_for_eval_result"
+ func_append linker_flags " $func_quote_for_eval_result"
+ done
+ IFS="$save_ifs"
+ func_stripname ' ' '' "$arg"
+ arg=$func_stripname_result
+ ;;
+
+ -Xcompiler)
+ prev=xcompiler
+ continue
+ ;;
+
+ -Xlinker)
+ prev=xlinker
+ continue
+ ;;
+
+ -XCClinker)
+ prev=xcclinker
+ continue
+ ;;
+
+ # -msg_* for osf cc
+ -msg_*)
+ func_quote_for_eval "$arg"
+ arg="$func_quote_for_eval_result"
+ ;;
+
+ # Flags to be passed through unchanged, with rationale:
+ # -64, -mips[0-9] enable 64-bit mode for the SGI compiler
+ # -r[0-9][0-9]* specify processor for the SGI compiler
+ # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
+ # +DA*, +DD* enable 64-bit mode for the HP compiler
+ # -q* compiler args for the IBM compiler
+ # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
+ # -F/path path to uninstalled frameworks, gcc on darwin
+ # -p, -pg, --coverage, -fprofile-* profiling flags for GCC
+ # @file GCC response files
+ # -tp=* Portland pgcc target processor selection
+ # --sysroot=* for sysroot support
+ # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
+ -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
+ -O*|-flto*|-fwhopr*|-fuse-linker-plugin)
+ func_quote_for_eval "$arg"
+ arg="$func_quote_for_eval_result"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ func_append compiler_flags " $arg"
+ continue
+ ;;
+
+ # Some other compiler flag.
+ -* | +*)
+ func_quote_for_eval "$arg"
+ arg="$func_quote_for_eval_result"
+ ;;
+
+ *.$objext)
+ # A standard object.
+ func_append objs " $arg"
+ ;;
+
+ *.lo)
+ # A libtool-controlled object.
+
+ # Check to see that this really is a libtool object.
+ if func_lalib_unsafe_p "$arg"; then
+ pic_object=
+ non_pic_object=
+
+ # Read the .lo file
+ func_source "$arg"
+
+ if test -z "$pic_object" ||
+ test -z "$non_pic_object" ||
+ test "$pic_object" = none &&
+ test "$non_pic_object" = none; then
+ func_fatal_error "cannot find name of object for \`$arg'"
+ fi
+
+ # Extract subdirectory from the argument.
+ func_dirname "$arg" "/" ""
+ xdir="$func_dirname_result"
+
+ if test "$pic_object" != none; then
+ # Prepend the subdirectory the object is found in.
+ pic_object="$xdir$pic_object"
+
+ if test "$prev" = dlfiles; then
+ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+ func_append dlfiles " $pic_object"
+ prev=
+ continue
+ else
+ # If libtool objects are unsupported, then we need to preload.
+ prev=dlprefiles
+ fi
+ fi
+
+ # CHECK ME: I think I busted this. -Ossama
+ if test "$prev" = dlprefiles; then
+ # Preload the old-style object.
+ func_append dlprefiles " $pic_object"
+ prev=
+ fi
+
+ # A PIC object.
+ func_append libobjs " $pic_object"
+ arg="$pic_object"
+ fi
+
+ # Non-PIC object.
+ if test "$non_pic_object" != none; then
+ # Prepend the subdirectory the object is found in.
+ non_pic_object="$xdir$non_pic_object"
+
+ # A standard non-PIC object
+ func_append non_pic_objects " $non_pic_object"
+ if test -z "$pic_object" || test "$pic_object" = none ; then
+ arg="$non_pic_object"
+ fi
+ else
+ # If the PIC object exists, use it instead.
+ # $xdir was prepended to $pic_object above.
+ non_pic_object="$pic_object"
+ func_append non_pic_objects " $non_pic_object"
+ fi
+ else
+ # Only an error if not doing a dry-run.
+ if $opt_dry_run; then
+ # Extract subdirectory from the argument.
+ func_dirname "$arg" "/" ""
+ xdir="$func_dirname_result"
+
+ func_lo2o "$arg"
+ pic_object=$xdir$objdir/$func_lo2o_result
+ non_pic_object=$xdir$func_lo2o_result
+ func_append libobjs " $pic_object"
+ func_append non_pic_objects " $non_pic_object"
+ else
+ func_fatal_error "\`$arg' is not a valid libtool object"
+ fi
+ fi
+ ;;
+
+ *.$libext)
+ # An archive.
+ func_append deplibs " $arg"
+ func_append old_deplibs " $arg"
+ continue
+ ;;
+
+ *.la)
+ # A libtool-controlled library.
+
+ func_resolve_sysroot "$arg"
+ if test "$prev" = dlfiles; then
+ # This library was specified with -dlopen.
+ func_append dlfiles " $func_resolve_sysroot_result"
+ prev=
+ elif test "$prev" = dlprefiles; then
+ # The library was specified with -dlpreopen.
+ func_append dlprefiles " $func_resolve_sysroot_result"
+ prev=
+ else
+ func_append deplibs " $func_resolve_sysroot_result"
+ fi
+ continue
+ ;;
+
+ # Some other compiler argument.
+ *)
+ # Unknown arguments in both finalize_command and compile_command need
+ # to be aesthetically quoted because they are evaled later.
+ func_quote_for_eval "$arg"
+ arg="$func_quote_for_eval_result"
+ ;;
+ esac # arg
+
+ # Now actually substitute the argument into the commands.
+ if test -n "$arg"; then
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ fi
+ done # argument parsing loop
+
+ test -n "$prev" && \
+ func_fatal_help "the \`$prevarg' option requires an argument"
+
+ if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
+ eval arg=\"$export_dynamic_flag_spec\"
+ func_append compile_command " $arg"
+ func_append finalize_command " $arg"
+ fi
+
+ oldlibs=
+ # calculate the name of the file, without its directory
+ func_basename "$output"
+ outputname="$func_basename_result"
+ libobjs_save="$libobjs"
+
+ if test -n "$shlibpath_var"; then
+ # get the directories listed in $shlibpath_var
+ eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\`
+ else
+ shlib_search_path=
+ fi
+ eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
+ eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
+ func_dirname "$output" "/" ""
+ output_objdir="$func_dirname_result$objdir"
+ func_to_tool_file "$output_objdir/"
+ tool_output_objdir=$func_to_tool_file_result
+ # Create the object directory.
+ func_mkdir_p "$output_objdir"
+
+ # Determine the type of output
+ case $output in
+ "")
+ func_fatal_help "you must specify an output file"
+ ;;
+ *.$libext) linkmode=oldlib ;;
+ *.lo | *.$objext) linkmode=obj ;;
+ *.la) linkmode=lib ;;
+ *) linkmode=prog ;; # Anything else should be a program.
+ esac
+
+ specialdeplibs=
+
+ libs=
+ # Find all interdependent deplibs by searching for libraries
+ # that are linked more than once (e.g. -la -lb -la)
+ for deplib in $deplibs; do
+ if $opt_preserve_dup_deps ; then
+ case "$libs " in
+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+ func_append libs " $deplib"
+ done
+
+ if test "$linkmode" = lib; then
+ libs="$predeps $libs $compiler_lib_search_path $postdeps"
+
+ # Compute libraries that are listed more than once in $predeps
+ # $postdeps and mark them as special (i.e., whose duplicates are
+ # not to be eliminated).
+ pre_post_deps=
+ if $opt_duplicate_compiler_generated_deps; then
+ for pre_post_dep in $predeps $postdeps; do
+ case "$pre_post_deps " in
+ *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;;
+ esac
+ func_append pre_post_deps " $pre_post_dep"
+ done
+ fi
+ pre_post_deps=
+ fi
+
+ deplibs=
+ newdependency_libs=
+ newlib_search_path=
+ need_relink=no # whether we're linking any uninstalled libtool libraries
+ notinst_deplibs= # not-installed libtool libraries
+ notinst_path= # paths that contain not-installed libtool libraries
+
+ case $linkmode in
+ lib)
+ passes="conv dlpreopen link"
+ for file in $dlfiles $dlprefiles; do
+ case $file in
+ *.la) ;;
+ *)
+ func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file"
+ ;;
+ esac
+ done
+ ;;
+ prog)
+ compile_deplibs=
+ finalize_deplibs=
+ alldeplibs=no
+ newdlfiles=
+ newdlprefiles=
+ passes="conv scan dlopen dlpreopen link"
+ ;;
+ *) passes="conv"
+ ;;
+ esac
+
+ for pass in $passes; do
+ # The preopen pass in lib mode reverses $deplibs; put it back here
+ # so that -L comes before libs that need it for instance...
+ if test "$linkmode,$pass" = "lib,link"; then
+ ## FIXME: Find the place where the list is rebuilt in the wrong
+ ## order, and fix it there properly
+ tmp_deplibs=
+ for deplib in $deplibs; do
+ tmp_deplibs="$deplib $tmp_deplibs"
+ done
+ deplibs="$tmp_deplibs"
+ fi
+
+ if test "$linkmode,$pass" = "lib,link" ||
+ test "$linkmode,$pass" = "prog,scan"; then
+ libs="$deplibs"
+ deplibs=
+ fi
+ if test "$linkmode" = prog; then
+ case $pass in
+ dlopen) libs="$dlfiles" ;;
+ dlpreopen) libs="$dlprefiles" ;;
+ link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
+ esac
+ fi
+ if test "$linkmode,$pass" = "lib,dlpreopen"; then
+ # Collect and forward deplibs of preopened libtool libs
+ for lib in $dlprefiles; do
+ # Ignore non-libtool-libs
+ dependency_libs=
+ func_resolve_sysroot "$lib"
+ case $lib in
+ *.la) func_source "$func_resolve_sysroot_result" ;;
+ esac
+
+ # Collect preopened libtool deplibs, except any this library
+ # has declared as weak libs
+ for deplib in $dependency_libs; do
+ func_basename "$deplib"
+ deplib_base=$func_basename_result
+ case " $weak_libs " in
+ *" $deplib_base "*) ;;
+ *) func_append deplibs " $deplib" ;;
+ esac
+ done
+ done
+ libs="$dlprefiles"
+ fi
+ if test "$pass" = dlopen; then
+ # Collect dlpreopened libraries
+ save_deplibs="$deplibs"
+ deplibs=
+ fi
+
+ for deplib in $libs; do
+ lib=
+ found=no
+ case $deplib in
+ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
+ if test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ func_append compiler_flags " $deplib"
+ if test "$linkmode" = lib ; then
+ case "$new_inherited_linker_flags " in
+ *" $deplib "*) ;;
+ * ) func_append new_inherited_linker_flags " $deplib" ;;
+ esac
+ fi
+ fi
+ continue
+ ;;
+ -l*)
+ if test "$linkmode" != lib && test "$linkmode" != prog; then
+ func_warning "\`-l' is ignored for archives/objects"
+ continue
+ fi
+ func_stripname '-l' '' "$deplib"
+ name=$func_stripname_result
+ if test "$linkmode" = lib; then
+ searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
+ else
+ searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
+ fi
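+	  # Probe each search directory, preferring a .la libtool archive;
+	  # finding only a bare shared or static library means this is not
+	  # a libtool library.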
+ for searchdir in $searchdirs; do
+ for search_ext in .la $std_shrext .so .a; do
+ # Search the libtool library
+ lib="$searchdir/lib${name}${search_ext}"
+ if test -f "$lib"; then
+ if test "$search_ext" = ".la"; then
+ found=yes
+ else
+ found=no
+ fi
+ break 2
+ fi
+ done
+ done
+ if test "$found" != yes; then
+ # deplib doesn't seem to be a libtool library
+ if test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ deplibs="$deplib $deplibs"
+ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+ fi
+ continue
+ else # deplib is a libtool library
+ # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
+ # We need to do some special things here, and not later.
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $deplib "*)
+ if func_lalib_p "$lib"; then
+ library_names=
+ old_library=
+ func_source "$lib"
+ for l in $old_library $library_names; do
+ ll="$l"
+ done
+ if test "X$ll" = "X$old_library" ; then # only static version available
+ found=no
+ func_dirname "$lib" "" "."
+ ladir="$func_dirname_result"
+ lib=$ladir/$old_library
+ if test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ deplibs="$deplib $deplibs"
+ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+ fi
+ continue
+ fi
+ fi
+ ;;
+ *) ;;
+ esac
+ fi
+ fi
+ ;; # -l
+ *.ltframework)
+ if test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ deplibs="$deplib $deplibs"
+ if test "$linkmode" = lib ; then
+ case "$new_inherited_linker_flags " in
+ *" $deplib "*) ;;
+ * ) func_append new_inherited_linker_flags " $deplib" ;;
+ esac
+ fi
+ fi
+ continue
+ ;;
+ -L*)
+ case $linkmode in
+ lib)
+ deplibs="$deplib $deplibs"
+ test "$pass" = conv && continue
+ newdependency_libs="$deplib $newdependency_libs"
+ func_stripname '-L' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result"
+ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ prog)
+ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+ if test "$pass" = scan; then
+ deplibs="$deplib $deplibs"
+ else
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ fi
+ func_stripname '-L' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result"
+ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ *)
+ func_warning "\`-L' is ignored for archives/objects"
+ ;;
+ esac # linkmode
+ continue
+ ;; # -L
+ -R*)
+ if test "$pass" = link; then
+ func_stripname '-R' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result"
+ dir=$func_resolve_sysroot_result
+ # Make sure the xrpath contains only unique directories.
+ case "$xrpath " in
+ *" $dir "*) ;;
+ *) func_append xrpath " $dir" ;;
+ esac
+ fi
+ deplibs="$deplib $deplibs"
+ continue
+ ;;
+ *.la)
+ func_resolve_sysroot "$deplib"
+ lib=$func_resolve_sysroot_result
+ ;;
+ *.$libext)
+ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+ case $linkmode in
+ lib)
+ # Linking convenience modules into shared libraries is allowed,
+ # but linking other static libraries is non-portable.
+ case " $dlpreconveniencelibs " in
+ *" $deplib "*) ;;
+ *)
+ valid_a_lib=no
+ case $deplibs_check_method in
+ match_pattern*)
+ set dummy $deplibs_check_method; shift
+ match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+ if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
+ | $EGREP "$match_pattern_regex" > /dev/null; then
+ valid_a_lib=yes
+ fi
+ ;;
+ pass_all)
+ valid_a_lib=yes
+ ;;
+ esac
+ if test "$valid_a_lib" != yes; then
+ echo
+ $ECHO "*** Warning: Trying to link with static lib archive $deplib."
+ echo "*** I have the capability to make that library automatically link in when"
+ echo "*** you link to this library. But I can only do this if you have a"
+ echo "*** shared version of the library, which you do not appear to have"
+		  echo "*** because the file extension .$libext of this argument makes me believe"
+ echo "*** that it is just a static archive that I should not use here."
+ else
+ echo
+ $ECHO "*** Warning: Linking the shared library $output against the"
+ $ECHO "*** static library $deplib is not portable!"
+ deplibs="$deplib $deplibs"
+ fi
+ ;;
+ esac
+ continue
+ ;;
+ prog)
+ if test "$pass" != link; then
+ deplibs="$deplib $deplibs"
+ else
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ fi
+ continue
+ ;;
+ esac # linkmode
+ ;; # *.$libext
+ *.lo | *.$objext)
+ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ elif test "$linkmode" = prog; then
+ if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+ # If there is no dlopen support or we're linking statically,
+ # we need to preload.
+ func_append newdlprefiles " $deplib"
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ func_append newdlfiles " $deplib"
+ fi
+ fi
+ continue
+ ;;
+ %DEPLIBS%)
+ alldeplibs=yes
+ continue
+ ;;
+ esac # case $deplib
+
+ if test "$found" = yes || test -f "$lib"; then :
+ else
+ func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'"
+ fi
+
+ # Check to see that this really is a libtool archive.
+ func_lalib_unsafe_p "$lib" \
+ || func_fatal_error "\`$lib' is not a valid libtool archive"
+
+ func_dirname "$lib" "" "."
+ ladir="$func_dirname_result"
+
+ dlname=
+ dlopen=
+ dlpreopen=
+ libdir=
+ library_names=
+ old_library=
+ inherited_linker_flags=
+ # If the library was installed with an old release of libtool,
+ # it will not redefine variables installed, or shouldnotlink
+ installed=yes
+ shouldnotlink=no
+ avoidtemprpath=
+
+
+ # Read the .la file
+ func_source "$lib"
+
+ # Convert "-framework foo" to "foo.ltframework"
+ if test -n "$inherited_linker_flags"; then
+ tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
+ for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+ case " $new_inherited_linker_flags " in
+ *" $tmp_inherited_linker_flag "*) ;;
+ *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";;
+ esac
+ done
+ fi
+ dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ if test "$linkmode,$pass" = "lib,link" ||
+ test "$linkmode,$pass" = "prog,scan" ||
+ { test "$linkmode" != prog && test "$linkmode" != lib; }; then
+ test -n "$dlopen" && func_append dlfiles " $dlopen"
+ test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen"
+ fi
+
+ if test "$pass" = conv; then
+ # Only check for convenience libraries
+ deplibs="$lib $deplibs"
+ if test -z "$libdir"; then
+ if test -z "$old_library"; then
+ func_fatal_error "cannot find name of link library for \`$lib'"
+ fi
+ # It is a libtool convenience library, so add in its objects.
+ func_append convenience " $ladir/$objdir/$old_library"
+ func_append old_convenience " $ladir/$objdir/$old_library"
+ elif test "$linkmode" != prog && test "$linkmode" != lib; then
+ func_fatal_error "\`$lib' is not a convenience library"
+ fi
+ tmp_libs=
+ for deplib in $dependency_libs; do
+ deplibs="$deplib $deplibs"
+ if $opt_preserve_dup_deps ; then
+ case "$tmp_libs " in
+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+ func_append tmp_libs " $deplib"
+ done
+ continue
+ fi # $pass = conv
+
+
+ # Get the name of the library we link against.
+ linklib=
+ if test -n "$old_library" &&
+ { test "$prefer_static_libs" = yes ||
+ test "$prefer_static_libs,$installed" = "built,no"; }; then
+ linklib=$old_library
+ else
+ for l in $old_library $library_names; do
+ linklib="$l"
+ done
+ fi
+ if test -z "$linklib"; then
+ func_fatal_error "cannot find name of link library for \`$lib'"
+ fi
+
+ # This library was specified with -dlopen.
+ if test "$pass" = dlopen; then
+ if test -z "$libdir"; then
+ func_fatal_error "cannot -dlopen a convenience library: \`$lib'"
+ fi
+ if test -z "$dlname" ||
+ test "$dlopen_support" != yes ||
+ test "$build_libtool_libs" = no; then
+ # If there is no dlname, no dlopen support or we're linking
+ # statically, we need to preload. We also need to preload any
+ # dependent libraries so libltdl's deplib preloader doesn't
+ # bomb out in the load deplibs phase.
+ func_append dlprefiles " $lib $dependency_libs"
+ else
+ func_append newdlfiles " $lib"
+ fi
+ continue
+ fi # $pass = dlopen
+
+ # We need an absolute path.
+ case $ladir in
+ [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
+ *)
+ abs_ladir=`cd "$ladir" && pwd`
+ if test -z "$abs_ladir"; then
+ func_warning "cannot determine absolute directory name of \`$ladir'"
+ func_warning "passing it literally to the linker, although it might fail"
+ abs_ladir="$ladir"
+ fi
+ ;;
+ esac
+ func_basename "$lib"
+ laname="$func_basename_result"
+
+ # Find the relevant object directory and library name.
+ if test "X$installed" = Xyes; then
+ if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+ func_warning "library \`$lib' was moved."
+ dir="$ladir"
+ absdir="$abs_ladir"
+ libdir="$abs_ladir"
+ else
+ dir="$lt_sysroot$libdir"
+ absdir="$lt_sysroot$libdir"
+ fi
+ test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
+ else
+ if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+ dir="$ladir"
+ absdir="$abs_ladir"
+ # Remove this search path later
+ func_append notinst_path " $abs_ladir"
+ else
+ dir="$ladir/$objdir"
+ absdir="$abs_ladir/$objdir"
+ # Remove this search path later
+ func_append notinst_path " $abs_ladir"
+ fi
+ fi # $installed = yes
+ func_stripname 'lib' '.la' "$laname"
+ name=$func_stripname_result
+
+ # This library was specified with -dlpreopen.
+ if test "$pass" = dlpreopen; then
+ if test -z "$libdir" && test "$linkmode" = prog; then
+ func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
+ fi
+ case "$host" in
+ # special handling for platforms with PE-DLLs.
+ *cygwin* | *mingw* | *cegcc* )
+ # Linker will automatically link against shared library if both
+ # static and shared are present. Therefore, ensure we extract
+ # symbols from the import library if a shared library is present
+ # (otherwise, the dlopen module name will be incorrect). We do
+ # this by putting the import library name into $newdlprefiles.
+ # We recover the dlopen module name by 'saving' the la file
+ # name in a special purpose variable, and (later) extracting the
+ # dlname from the la file.
+ if test -n "$dlname"; then
+ func_tr_sh "$dir/$linklib"
+ eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
+ func_append newdlprefiles " $dir/$linklib"
+ else
+ func_append newdlprefiles " $dir/$old_library"
+ # Keep a list of preopened convenience libraries to check
+ # that they are being used correctly in the link pass.
+ test -z "$libdir" && \
+ func_append dlpreconveniencelibs " $dir/$old_library"
+ fi
+ ;;
+ * )
+ # Prefer using a static library (so that no silly _DYNAMIC symbols
+ # are required to link).
+ if test -n "$old_library"; then
+ func_append newdlprefiles " $dir/$old_library"
+ # Keep a list of preopened convenience libraries to check
+ # that they are being used correctly in the link pass.
+ test -z "$libdir" && \
+ func_append dlpreconveniencelibs " $dir/$old_library"
+ # Otherwise, use the dlname, so that lt_dlopen finds it.
+ elif test -n "$dlname"; then
+ func_append newdlprefiles " $dir/$dlname"
+ else
+ func_append newdlprefiles " $dir/$linklib"
+ fi
+ ;;
+ esac
+ fi # $pass = dlpreopen
+
+ if test -z "$libdir"; then
+ # Link the convenience library
+ if test "$linkmode" = lib; then
+ deplibs="$dir/$old_library $deplibs"
+ elif test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$dir/$old_library $compile_deplibs"
+ finalize_deplibs="$dir/$old_library $finalize_deplibs"
+ else
+ deplibs="$lib $deplibs" # used for prog,scan pass
+ fi
+ continue
+ fi
+
+
+ if test "$linkmode" = prog && test "$pass" != link; then
+ func_append newlib_search_path " $ladir"
+ deplibs="$lib $deplibs"
+
+ linkalldeplibs=no
+ if test "$link_all_deplibs" != no || test -z "$library_names" ||
+ test "$build_libtool_libs" = no; then
+ linkalldeplibs=yes
+ fi
+
+ tmp_libs=
+ for deplib in $dependency_libs; do
+ case $deplib in
+ -L*) func_stripname '-L' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result"
+ func_append newlib_search_path " $func_resolve_sysroot_result"
+ ;;
+ esac
+ # Need to link against all dependency_libs?
+ if test "$linkalldeplibs" = yes; then
+ deplibs="$deplib $deplibs"
+ else
+ # Need to hardcode shared library paths
+ # or/and link against static libraries
+ newdependency_libs="$deplib $newdependency_libs"
+ fi
+ if $opt_preserve_dup_deps ; then
+ case "$tmp_libs " in
+ *" $deplib "*) func_append specialdeplibs " $deplib" ;;
+ esac
+ fi
+ func_append tmp_libs " $deplib"
+ done # for deplib
+ continue
+ fi # $linkmode = prog...
+
+ if test "$linkmode,$pass" = "prog,link"; then
+ if test -n "$library_names" &&
+ { { test "$prefer_static_libs" = no ||
+ test "$prefer_static_libs,$installed" = "built,yes"; } ||
+ test -z "$old_library"; }; then
+ # We need to hardcode the library path
+ if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
+ # Make sure the rpath contains only unique directories.
+ case "$temp_rpath:" in
+ *"$absdir:"*) ;;
+ *) func_append temp_rpath "$absdir:" ;;
+ esac
+ fi
+
+ # Hardcode the library path.
+ # Skip directories that are in the system default run-time
+ # search path.
+ case " $sys_lib_dlsearch_path " in
+ *" $absdir "*) ;;
+ *)
+ case "$compile_rpath " in
+ *" $absdir "*) ;;
+ *) func_append compile_rpath " $absdir" ;;
+ esac
+ ;;
+ esac
+ case " $sys_lib_dlsearch_path " in
+ *" $libdir "*) ;;
+ *)
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ ;;
+ esac
+ fi # $linkmode,$pass = prog,link...
+
+ if test "$alldeplibs" = yes &&
+ { test "$deplibs_check_method" = pass_all ||
+ { test "$build_libtool_libs" = yes &&
+ test -n "$library_names"; }; }; then
+ # We only need to search for static libraries
+ continue
+ fi
+ fi
+
+ link_static=no # Whether the deplib will be linked statically
+ use_static_libs=$prefer_static_libs
+ if test "$use_static_libs" = built && test "$installed" = yes; then
+ use_static_libs=no
+ fi
+ if test -n "$library_names" &&
+ { test "$use_static_libs" = no || test -z "$old_library"; }; then
+ case $host in
+ *cygwin* | *mingw* | *cegcc*)
+ # No point in relinking DLLs because paths are not encoded
+ func_append notinst_deplibs " $lib"
+ need_relink=no
+ ;;
+ *)
+ if test "$installed" = no; then
+ func_append notinst_deplibs " $lib"
+ need_relink=yes
+ fi
+ ;;
+ esac
+ # This is a shared library
+
+ # Warn about portability, can't link against -module's on some
+ # systems (darwin). Don't bleat about dlopened modules though!
+ dlopenmodule=""
+ for dlpremoduletest in $dlprefiles; do
+ if test "X$dlpremoduletest" = "X$lib"; then
+ dlopenmodule="$dlpremoduletest"
+ break
+ fi
+ done
+ if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then
+ echo
+ if test "$linkmode" = prog; then
+ $ECHO "*** Warning: Linking the executable $output against the loadable module"
+ else
+ $ECHO "*** Warning: Linking the shared library $output against the loadable module"
+ fi
+ $ECHO "*** $linklib is not portable!"
+ fi
+ if test "$linkmode" = lib &&
+ test "$hardcode_into_libs" = yes; then
+ # Hardcode the library path.
+ # Skip directories that are in the system default run-time
+ # search path.
+ case " $sys_lib_dlsearch_path " in
+ *" $absdir "*) ;;
+ *)
+ case "$compile_rpath " in
+ *" $absdir "*) ;;
+ *) func_append compile_rpath " $absdir" ;;
+ esac
+ ;;
+ esac
+ case " $sys_lib_dlsearch_path " in
+ *" $libdir "*) ;;
+ *)
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ ;;
+ esac
+ fi
+
+ if test -n "$old_archive_from_expsyms_cmds"; then
+ # figure out the soname
+ set dummy $library_names
+ shift
+ realname="$1"
+ shift
+ libname=`eval "\\$ECHO \"$libname_spec\""`
+ # use dlname if we got it. it's perfectly good, no?
+ if test -n "$dlname"; then
+ soname="$dlname"
+ elif test -n "$soname_spec"; then
+ # bleh windows
+ case $host in
+	      *cygwin* | *mingw* | *cegcc*)
+ func_arith $current - $age
+ major=$func_arith_result
+ versuffix="-$major"
+ ;;
+ esac
+ eval soname=\"$soname_spec\"
+ else
+ soname="$realname"
+ fi
+
+ # Make a new name for the extract_expsyms_cmds to use
+ soroot="$soname"
+ func_basename "$soroot"
+ soname="$func_basename_result"
+ func_stripname 'lib' '.dll' "$soname"
+ newlib=libimp-$func_stripname_result.a
+
+ # If the library has no export list, then create one now
+ if test -f "$output_objdir/$soname-def"; then :
+ else
+ func_verbose "extracting exported symbol list from \`$soname'"
+ func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
+ fi
+
+ # Create $newlib
+ if test -f "$output_objdir/$newlib"; then :; else
+ func_verbose "generating import library for \`$soname'"
+ func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
+ fi
+ # make sure the library variables are pointing to the new library
+ dir=$output_objdir
+ linklib=$newlib
+ fi # test -n "$old_archive_from_expsyms_cmds"
+
+ if test "$linkmode" = prog || test "$opt_mode" != relink; then
+ add_shlibpath=
+ add_dir=
+ add=
+ lib_linked=yes
+ case $hardcode_action in
+ immediate | unsupported)
+ if test "$hardcode_direct" = no; then
+ add="$dir/$linklib"
+ case $host in
+ *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;;
+ *-*-sysv4*uw2*) add_dir="-L$dir" ;;
+ *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
+ *-*-unixware7*) add_dir="-L$dir" ;;
+ *-*-darwin* )
+ # if the lib is a (non-dlopened) module then we can not
+ # link against it, someone is ignoring the earlier warnings
+ if /usr/bin/file -L $add 2> /dev/null |
+ $GREP ": [^:]* bundle" >/dev/null ; then
+ if test "X$dlopenmodule" != "X$lib"; then
+ $ECHO "*** Warning: lib $linklib is a module, not a shared library"
+ if test -z "$old_library" ; then
+ echo
+ echo "*** And there doesn't seem to be a static archive available"
+ echo "*** The link will probably fail, sorry"
+ else
+ add="$dir/$old_library"
+ fi
+ elif test -n "$old_library"; then
+ add="$dir/$old_library"
+ fi
+ fi
+ esac
+ elif test "$hardcode_minus_L" = no; then
+ case $host in
+ *-*-sunos*) add_shlibpath="$dir" ;;
+ esac
+ add_dir="-L$dir"
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = no; then
+ add_shlibpath="$dir"
+ add="-l$name"
+ else
+ lib_linked=no
+ fi
+ ;;
+ relink)
+ if test "$hardcode_direct" = yes &&
+ test "$hardcode_direct_absolute" = no; then
+ add="$dir/$linklib"
+ elif test "$hardcode_minus_L" = yes; then
+ add_dir="-L$dir"
+ # Try looking first in the location we're being installed to.
+ if test -n "$inst_prefix_dir"; then
+ case $libdir in
+ [\\/]*)
+ func_append add_dir " -L$inst_prefix_dir$libdir"
+ ;;
+ esac
+ fi
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
+ add_shlibpath="$dir"
+ add="-l$name"
+ else
+ lib_linked=no
+ fi
+ ;;
+ *) lib_linked=no ;;
+ esac
+
+ if test "$lib_linked" != yes; then
+ func_fatal_configuration "unsupported hardcode properties"
+ fi
+
+ if test -n "$add_shlibpath"; then
+ case :$compile_shlibpath: in
+ *":$add_shlibpath:"*) ;;
+ *) func_append compile_shlibpath "$add_shlibpath:" ;;
+ esac
+ fi
+ if test "$linkmode" = prog; then
+ test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+ test -n "$add" && compile_deplibs="$add $compile_deplibs"
+ else
+ test -n "$add_dir" && deplibs="$add_dir $deplibs"
+ test -n "$add" && deplibs="$add $deplibs"
+ if test "$hardcode_direct" != yes &&
+ test "$hardcode_minus_L" != yes &&
+ test "$hardcode_shlibpath_var" = yes; then
+ case :$finalize_shlibpath: in
+ *":$libdir:"*) ;;
+ *) func_append finalize_shlibpath "$libdir:" ;;
+ esac
+ fi
+ fi
+ fi
+
+ if test "$linkmode" = prog || test "$opt_mode" = relink; then
+ add_shlibpath=
+ add_dir=
+ add=
+ # Finalize command for both is simple: just hardcode it.
+ if test "$hardcode_direct" = yes &&
+ test "$hardcode_direct_absolute" = no; then
+ add="$libdir/$linklib"
+ elif test "$hardcode_minus_L" = yes; then
+ add_dir="-L$libdir"
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
+ case :$finalize_shlibpath: in
+ *":$libdir:"*) ;;
+ *) func_append finalize_shlibpath "$libdir:" ;;
+ esac
+ add="-l$name"
+ elif test "$hardcode_automatic" = yes; then
+ if test -n "$inst_prefix_dir" &&
+ test -f "$inst_prefix_dir$libdir/$linklib" ; then
+ add="$inst_prefix_dir$libdir/$linklib"
+ else
+ add="$libdir/$linklib"
+ fi
+ else
+ # We cannot seem to hardcode it, guess we'll fake it.
+ add_dir="-L$libdir"
+ # Try looking first in the location we're being installed to.
+ if test -n "$inst_prefix_dir"; then
+ case $libdir in
+ [\\/]*)
+ func_append add_dir " -L$inst_prefix_dir$libdir"
+ ;;
+ esac
+ fi
+ add="-l$name"
+ fi
+
+ if test "$linkmode" = prog; then
+ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+ test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+ else
+ test -n "$add_dir" && deplibs="$add_dir $deplibs"
+ test -n "$add" && deplibs="$add $deplibs"
+ fi
+ fi
+ elif test "$linkmode" = prog; then
+ # Here we assume that one of hardcode_direct or hardcode_minus_L
+ # is not unsupported. This is valid on all known static and
+ # shared platforms.
+ if test "$hardcode_direct" != unsupported; then
+ test -n "$old_library" && linklib="$old_library"
+ compile_deplibs="$dir/$linklib $compile_deplibs"
+ finalize_deplibs="$dir/$linklib $finalize_deplibs"
+ else
+ compile_deplibs="-l$name -L$dir $compile_deplibs"
+ finalize_deplibs="-l$name -L$dir $finalize_deplibs"
+ fi
+ elif test "$build_libtool_libs" = yes; then
+ # Not a shared library
+ if test "$deplibs_check_method" != pass_all; then
+	  # We're trying to link a shared library against a static one
+ # but the system doesn't support it.
+
+ # Just print a warning and add the library to dependency_libs so
+ # that the program can be linked against the static library.
+ echo
+ $ECHO "*** Warning: This system can not link to static lib archive $lib."
+ echo "*** I have the capability to make that library automatically link in when"
+ echo "*** you link to this library. But I can only do this if you have a"
+ echo "*** shared version of the library, which you do not appear to have."
+ if test "$module" = yes; then
+ echo "*** But as you try to build a module library, libtool will still create "
+ echo "*** a static module, that should work as long as the dlopening application"
+ echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
+ if test -z "$global_symbol_pipe"; then
+ echo
+ echo "*** However, this would only work if libtool was able to extract symbol"
+ echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+ echo "*** not find such a program. So, this module is probably useless."
+ echo "*** \`nm' from GNU binutils and a full rebuild may help."
+ fi
+ if test "$build_old_libs" = no; then
+ build_libtool_libs=module
+ build_old_libs=yes
+ else
+ build_libtool_libs=no
+ fi
+ fi
+ else
+ deplibs="$dir/$old_library $deplibs"
+ link_static=yes
+ fi
+ fi # link shared/static library?
+
+ if test "$linkmode" = lib; then
+ if test -n "$dependency_libs" &&
+ { test "$hardcode_into_libs" != yes ||
+ test "$build_old_libs" = yes ||
+ test "$link_static" = yes; }; then
+ # Extract -R from dependency_libs
+ temp_deplibs=
+ for libdir in $dependency_libs; do
+ case $libdir in
+ -R*) func_stripname '-R' '' "$libdir"
+ temp_xrpath=$func_stripname_result
+ case " $xrpath " in
+ *" $temp_xrpath "*) ;;
+ *) func_append xrpath " $temp_xrpath";;
+ esac;;
+ *) func_append temp_deplibs " $libdir";;
+ esac
+ done
+ dependency_libs="$temp_deplibs"
+ fi
+
+ func_append newlib_search_path " $absdir"
+ # Link against this library
+ test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+ # ... and its dependency_libs
+ tmp_libs=
+ for deplib in $dependency_libs; do
+ newdependency_libs="$deplib $newdependency_libs"
+ case $deplib in
+ -L*) func_stripname '-L' '' "$deplib"
+ func_resolve_sysroot "$func_stripname_result";;
+ *) func_resolve_sysroot "$deplib" ;;
+ esac
+ if $opt_preserve_dup_deps ; then
+ case "$tmp_libs " in
+ *" $func_resolve_sysroot_result "*)
+ func_append specialdeplibs " $func_resolve_sysroot_result" ;;
+ esac
+ fi
+ func_append tmp_libs " $func_resolve_sysroot_result"
+ done
+
+ if test "$link_all_deplibs" != no; then
+ # Add the search paths of all dependency libraries
+ for deplib in $dependency_libs; do
+ path=
+ case $deplib in
+ -L*) path="$deplib" ;;
+ *.la)
+ func_resolve_sysroot "$deplib"
+ deplib=$func_resolve_sysroot_result
+ func_dirname "$deplib" "" "."
+ dir=$func_dirname_result
+ # We need an absolute path.
+ case $dir in
+ [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
+ *)
+ absdir=`cd "$dir" && pwd`
+ if test -z "$absdir"; then
+ func_warning "cannot determine absolute directory name of \`$dir'"
+ absdir="$dir"
+ fi
+ ;;
+ esac
+ if $GREP "^installed=no" $deplib > /dev/null; then
+ case $host in
+ *-*-darwin*)
+ depdepl=
+ eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+ if test -n "$deplibrary_names" ; then
+ for tmp in $deplibrary_names ; do
+ depdepl=$tmp
+ done
+ if test -f "$absdir/$objdir/$depdepl" ; then
+ depdepl="$absdir/$objdir/$depdepl"
+ darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+ if test -z "$darwin_install_name"; then
+ darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+ fi
+ func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
+ func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}"
+ path=
+ fi
+ fi
+ ;;
+ *)
+ path="-L$absdir/$objdir"
+ ;;
+ esac
+ else
+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+ test -z "$libdir" && \
+ func_fatal_error "\`$deplib' is not a valid libtool archive"
+ test "$absdir" != "$libdir" && \
+ func_warning "\`$deplib' seems to be moved"
+
+ path="-L$absdir"
+ fi
+ ;;
+ esac
+ case " $deplibs " in
+ *" $path "*) ;;
+ *) deplibs="$path $deplibs" ;;
+ esac
+ done
+ fi # link_all_deplibs != no
+ fi # linkmode = lib
+ done # for deplib in $libs
+ if test "$pass" = link; then
+ if test "$linkmode" = "prog"; then
+ compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
+ finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
+ else
+ compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ fi
+ fi
+ dependency_libs="$newdependency_libs"
+ if test "$pass" = dlpreopen; then
+ # Link the dlpreopened libraries before other libraries
+ for deplib in $save_deplibs; do
+ deplibs="$deplib $deplibs"
+ done
+ fi
+ if test "$pass" != dlopen; then
+ if test "$pass" != conv; then
+ # Make sure lib_search_path contains only unique directories.
+ lib_search_path=
+ for dir in $newlib_search_path; do
+ case "$lib_search_path " in
+ *" $dir "*) ;;
+ *) func_append lib_search_path " $dir" ;;
+ esac
+ done
+ newlib_search_path=
+ fi
+
+ if test "$linkmode,$pass" != "prog,link"; then
+ vars="deplibs"
+ else
+ vars="compile_deplibs finalize_deplibs"
+ fi
+ for var in $vars dependency_libs; do
+ # Add libraries to $var in reverse order
+ eval tmp_libs=\"\$$var\"
+ new_libs=
+ for deplib in $tmp_libs; do
+ # FIXME: Pedantically, this is the right thing to do, so
+ # that some nasty dependency loop isn't accidentally
+ # broken:
+ #new_libs="$deplib $new_libs"
+ # Pragmatically, this seems to cause very few problems in
+ # practice:
+ case $deplib in
+ -L*) new_libs="$deplib $new_libs" ;;
+ -R*) ;;
+ *)
+ # And here is the reason: when a library appears more
+ # than once as an explicit dependence of a library, or
+ # is implicitly linked in more than once by the
+ # compiler, it is considered special, and multiple
+ # occurrences thereof are not removed. Compare this
+ # with having the same library being listed as a
+ # dependency of multiple other libraries: in this case,
+ # we know (pedantically, we assume) the library does not
+ # need to be listed more than once, so we keep only the
+ # last copy. This is not always right, but it is rare
+ # enough that we require users that really mean to play
+ # such unportable linking tricks to link the library
+ # using -Wl,-lname, so that libtool does not consider it
+ # for duplicate removal.
+ case " $specialdeplibs " in
+ *" $deplib "*) new_libs="$deplib $new_libs" ;;
+ *)
+ case " $new_libs " in
+ *" $deplib "*) ;;
+ *) new_libs="$deplib $new_libs" ;;
+ esac
+ ;;
+ esac
+ ;;
+ esac
+ done
+ tmp_libs=
+ for deplib in $new_libs; do
+ case $deplib in
+ -L*)
+ case " $tmp_libs " in
+ *" $deplib "*) ;;
+ *) func_append tmp_libs " $deplib" ;;
+ esac
+ ;;
+ *) func_append tmp_libs " $deplib" ;;
+ esac
+ done
+ eval $var=\"$tmp_libs\"
+ done # for var
+ fi
+ # Last step: remove runtime libs from dependency_libs
+ # (they stay in deplibs)
+ tmp_libs=
+ for i in $dependency_libs ; do
+ case " $predeps $postdeps $compiler_lib_search_path " in
+ *" $i "*)
+ i=""
+ ;;
+ esac
+ if test -n "$i" ; then
+ func_append tmp_libs " $i"
+ fi
+ done
+ dependency_libs=$tmp_libs
+ done # for pass
+ if test "$linkmode" = prog; then
+ dlfiles="$newdlfiles"
+ fi
+ if test "$linkmode" = prog || test "$linkmode" = lib; then
+ dlprefiles="$newdlprefiles"
+ fi
+
+ case $linkmode in
+ oldlib)
+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+ func_warning "\`-dlopen' is ignored for archives"
+ fi
+
+ case " $deplibs" in
+ *\ -l* | *\ -L*)
+ func_warning "\`-l' and \`-L' are ignored for archives" ;;
+ esac
+
+ test -n "$rpath" && \
+ func_warning "\`-rpath' is ignored for archives"
+
+ test -n "$xrpath" && \
+ func_warning "\`-R' is ignored for archives"
+
+ test -n "$vinfo" && \
+ func_warning "\`-version-info/-version-number' is ignored for archives"
+
+ test -n "$release" && \
+ func_warning "\`-release' is ignored for archives"
+
+ test -n "$export_symbols$export_symbols_regex" && \
+ func_warning "\`-export-symbols' is ignored for archives"
+
+ # Now set the variables for building old libraries.
+ build_libtool_libs=no
+ oldlibs="$output"
+ func_append objs "$old_deplibs"
+ ;;
+
+ lib)
+ # Make sure we only generate libraries of the form `libNAME.la'.
+ case $outputname in
+ lib*)
+ func_stripname 'lib' '.la' "$outputname"
+ name=$func_stripname_result
+ eval shared_ext=\"$shrext_cmds\"
+ eval libname=\"$libname_spec\"
+ ;;
+ *)
+ test "$module" = no && \
+ func_fatal_help "libtool library \`$output' must begin with \`lib'"
+
+ if test "$need_lib_prefix" != no; then
+ # Add the "lib" prefix for modules if required
+ func_stripname '' '.la' "$outputname"
+ name=$func_stripname_result
+ eval shared_ext=\"$shrext_cmds\"
+ eval libname=\"$libname_spec\"
+ else
+ func_stripname '' '.la' "$outputname"
+ libname=$func_stripname_result
+ fi
+ ;;
+ esac
+
+ if test -n "$objs"; then
+ if test "$deplibs_check_method" != pass_all; then
+ func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs"
+ else
+ echo
+ $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
+ $ECHO "*** objects $objs is not portable!"
+ func_append libobjs " $objs"
+ fi
+ fi
+
+ test "$dlself" != no && \
+ func_warning "\`-dlopen self' is ignored for libtool libraries"
+
+ set dummy $rpath
+ shift
+ test "$#" -gt 1 && \
+ func_warning "ignoring multiple \`-rpath's for a libtool library"
+
+ install_libdir="$1"
+
+ oldlibs=
+ if test -z "$rpath"; then
+ if test "$build_libtool_libs" = yes; then
+ # Building a libtool convenience library.
+ # Some compilers have problems with a `.al' extension so
+	    # convenience libraries should have the same extension as an
+ # archive normally would.
+ oldlibs="$output_objdir/$libname.$libext $oldlibs"
+ build_libtool_libs=convenience
+ build_old_libs=yes
+ fi
+
+ test -n "$vinfo" && \
+ func_warning "\`-version-info/-version-number' is ignored for convenience libraries"
+
+ test -n "$release" && \
+ func_warning "\`-release' is ignored for convenience libraries"
+ else
+
+ # Parse the version information argument.
+ save_ifs="$IFS"; IFS=':'
+ set dummy $vinfo 0 0 0
+ shift
+ IFS="$save_ifs"
+
+ test -n "$7" && \
+ func_fatal_help "too many parameters to \`-version-info'"
+
+ # convert absolute version numbers to libtool ages
+ # this retains compatibility with .la files and attempts
+ # to make the code below a bit more comprehensible
+
+ case $vinfo_number in
+ yes)
+ number_major="$1"
+ number_minor="$2"
+ number_revision="$3"
+ #
+ # There are really only two kinds -- those that
+ # use the current revision as the major version
+ # and those that subtract age and use age as
+ # a minor version. But, then there is irix
+ # which has an extra 1 added just for fun
+ #
+ case $version_type in
+ darwin|linux|osf|windows|none)
+ func_arith $number_major + $number_minor
+ current=$func_arith_result
+ age="$number_minor"
+ revision="$number_revision"
+ ;;
+ freebsd-aout|freebsd-elf|qnx|sunos)
+ current="$number_major"
+ revision="$number_minor"
+ age="0"
+ ;;
+ irix|nonstopux)
+ func_arith $number_major + $number_minor
+ current=$func_arith_result
+ age="$number_minor"
+ revision="$number_minor"
+ lt_irix_increment=no
+ ;;
+ esac
+ ;;
+ no)
+ current="$1"
+ revision="$2"
+ age="$3"
+ ;;
+ esac
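+	  # For example, `-version-number 3:2:1' with a linux-style
+	  # $version_type converts above to current=5 (3+2), age=2,
+	  # revision=1; the version blocks further down then yield
+	  # major=.3 and versuffix=.3.2.1 for the shared library name.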
+
+ # Check that each of the things are valid numbers.
+ case $current in
+ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+ *)
+ func_error "CURRENT \`$current' must be a nonnegative integer"
+ func_fatal_error "\`$vinfo' is not valid version information"
+ ;;
+ esac
+
+ case $revision in
+ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+ *)
+ func_error "REVISION \`$revision' must be a nonnegative integer"
+ func_fatal_error "\`$vinfo' is not valid version information"
+ ;;
+ esac
+
+ case $age in
+ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+ *)
+ func_error "AGE \`$age' must be a nonnegative integer"
+ func_fatal_error "\`$vinfo' is not valid version information"
+ ;;
+ esac
+
+ if test "$age" -gt "$current"; then
+ func_error "AGE \`$age' is greater than the current interface number \`$current'"
+ func_fatal_error "\`$vinfo' is not valid version information"
+ fi
+
+ # Calculate the version variables.
+ major=
+ versuffix=
+ verstring=
+ case $version_type in
+ none) ;;
+
+ darwin)
+ # Like Linux, but with the current version available in
+ # verstring for coding it into the library header
+ func_arith $current - $age
+ major=.$func_arith_result
+ versuffix="$major.$age.$revision"
+ # Darwin ld doesn't like 0 for these options...
+ func_arith $current + 1
+ minor_current=$func_arith_result
+ xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
+ verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+ ;;
+
+ freebsd-aout)
+ major=".$current"
+ versuffix=".$current.$revision";
+ ;;
+
+ freebsd-elf)
+ major=".$current"
+ versuffix=".$current"
+ ;;
+
+ irix | nonstopux)
+ if test "X$lt_irix_increment" = "Xno"; then
+ func_arith $current - $age
+ else
+ func_arith $current - $age + 1
+ fi
+ major=$func_arith_result
+
+ case $version_type in
+ nonstopux) verstring_prefix=nonstopux ;;
+ *) verstring_prefix=sgi ;;
+ esac
+ verstring="$verstring_prefix$major.$revision"
+
+ # Add in all the interfaces that we are compatible with.
+ loop=$revision
+ while test "$loop" -ne 0; do
+ func_arith $revision - $loop
+ iface=$func_arith_result
+ func_arith $loop - 1
+ loop=$func_arith_result
+ verstring="$verstring_prefix$major.$iface:$verstring"
+ done
+
+ # Before this point, $major must not contain `.'.
+ major=.$major
+ versuffix="$major.$revision"
+ ;;
+
+ linux)
+ func_arith $current - $age
+ major=.$func_arith_result
+ versuffix="$major.$age.$revision"
+ ;;
+
+ osf)
+ func_arith $current - $age
+ major=.$func_arith_result
+ versuffix=".$current.$age.$revision"
+ verstring="$current.$age.$revision"
+
+ # Add in all the interfaces that we are compatible with.
+ loop=$age
+ while test "$loop" -ne 0; do
+ func_arith $current - $loop
+ iface=$func_arith_result
+ func_arith $loop - 1
+ loop=$func_arith_result
+ verstring="$verstring:${iface}.0"
+ done
+
+ # Make executables depend on our current version.
+ func_append verstring ":${current}.0"
+ ;;
+
+ qnx)
+ major=".$current"
+ versuffix=".$current"
+ ;;
+
+ sunos)
+ major=".$current"
+ versuffix=".$current.$revision"
+ ;;
+
+ windows)
+ # Use '-' rather than '.', since we only want one
+ # extension on DOS 8.3 filesystems.
+ func_arith $current - $age
+ major=$func_arith_result
+ versuffix="-$major"
+ ;;
+
+ *)
+ func_fatal_configuration "unknown library version type \`$version_type'"
+ ;;
+ esac
+
+ # Clear the version info if we defaulted, and they specified a release.
+ if test -z "$vinfo" && test -n "$release"; then
+ major=
+ case $version_type in
+ darwin)
+ # we can't check for "0.0" in archive_cmds due to quoting
+ # problems, so we reset it completely
+ verstring=
+ ;;
+ *)
+ verstring="0.0"
+ ;;
+ esac
+ if test "$need_version" = no; then
+ versuffix=
+ else
+ versuffix=".0.0"
+ fi
+ fi
+
+ # Remove version info from name if versioning should be avoided
+ if test "$avoid_version" = yes && test "$need_version" = no; then
+ major=
+ versuffix=
+ verstring=""
+ fi
+
+ # Check to see if the archive will have undefined symbols.
+ if test "$allow_undefined" = yes; then
+ if test "$allow_undefined_flag" = unsupported; then
+ func_warning "undefined symbols not allowed in $host shared libraries"
+ build_libtool_libs=no
+ build_old_libs=yes
+ fi
+ else
+ # Don't allow undefined symbols.
+ allow_undefined_flag="$no_undefined_flag"
+ fi
+
+ fi
+
+ func_generate_dlsyms "$libname" "$libname" "yes"
+ func_append libobjs " $symfileobj"
+ test "X$libobjs" = "X " && libobjs=
+
+ if test "$opt_mode" != relink; then
+ # Remove our outputs, but don't remove object files since they
+ # may have been created when compiling PIC objects.
+ removelist=
+ tempremovelist=`$ECHO "$output_objdir/*"`
+ for p in $tempremovelist; do
+ case $p in
+ *.$objext | *.gcno)
+ ;;
+ $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
+ if test "X$precious_files_regex" != "X"; then
+ if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
+ then
+ continue
+ fi
+ fi
+ func_append removelist " $p"
+ ;;
+ *) ;;
+ esac
+ done
+ test -n "$removelist" && \
+ func_show_eval "${RM}r \$removelist"
+ fi
+
+ # Now set the variables for building old libraries.
+ if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
+ func_append oldlibs " $output_objdir/$libname.$libext"
+
+ # Transform .lo files to .o files.
+ oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
+ fi
+
+ # Eliminate all temporary directories.
+ #for path in $notinst_path; do
+ # lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
+ # deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
+ # dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
+ #done
+
+ if test -n "$xrpath"; then
+ # If the user specified any rpath flags, then add them.
+ temp_xrpath=
+ for libdir in $xrpath; do
+ func_replace_sysroot "$libdir"
+ func_append temp_xrpath " -R$func_replace_sysroot_result"
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ done
+ if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+ dependency_libs="$temp_xrpath $dependency_libs"
+ fi
+ fi
+
+ # Make sure dlfiles contains only unique files that won't be dlpreopened
+ old_dlfiles="$dlfiles"
+ dlfiles=
+ for lib in $old_dlfiles; do
+ case " $dlprefiles $dlfiles " in
+ *" $lib "*) ;;
+ *) func_append dlfiles " $lib" ;;
+ esac
+ done
+
+ # Make sure dlprefiles contains only unique files
+ old_dlprefiles="$dlprefiles"
+ dlprefiles=
+ for lib in $old_dlprefiles; do
+ case "$dlprefiles " in
+ *" $lib "*) ;;
+ *) func_append dlprefiles " $lib" ;;
+ esac
+ done
+
+ if test "$build_libtool_libs" = yes; then
+ if test -n "$rpath"; then
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
+ # these systems don't actually have a c library (as such)!
+ ;;
+ *-*-rhapsody* | *-*-darwin1.[012])
+ # Rhapsody C library is in the System framework
+ func_append deplibs " System.ltframework"
+ ;;
+ *-*-netbsd*)
+ # Don't link with libc until the a.out ld.so is fixed.
+ ;;
+ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+ # Do not include libc due to us having libc/libc_r.
+ ;;
+ *-*-sco3.2v5* | *-*-sco5v6*)
+ # Causes problems with __ctype
+ ;;
+ *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+ # Compiler inserts libc in the correct place for threads to work
+ ;;
+ *)
+ # Add libc to deplibs on all other systems if necessary.
+ if test "$build_libtool_need_lc" = "yes"; then
+ func_append deplibs " -lc"
+ fi
+ ;;
+ esac
+ fi
+
+ # Transform deplibs into only deplibs that can be linked in shared.
+ name_save=$name
+ libname_save=$libname
+ release_save=$release
+ versuffix_save=$versuffix
+ major_save=$major
+ # I'm not sure if I'm treating the release correctly. I think
+ # release should show up in the -l (ie -lgmp5) so we don't want to
+ # add it in twice. Is that correct?
+ release=""
+ versuffix=""
+ major=""
+ newdeplibs=
+ droppeddeps=no
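+	# How each -l deplib is vetted depends on $deplibs_check_method:
+	# pass_all accepts everything, test_compile link-tests the deplibs
+	# with $LTCC and inspects ldd output, file_magic/match_pattern look
+	# for matching files on the library search paths, and none/unknown
+	# drop all declared inter-library dependencies.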
+ case $deplibs_check_method in
+ pass_all)
+ # Don't check for shared/static. Everything works.
+ # This might be a little naive. We might want to check
+ # whether the library exists or not. But this is on
+ # osf3 & osf4 and I'm not really sure... Just
+ # implementing what was already the behavior.
+ newdeplibs=$deplibs
+ ;;
+ test_compile)
+ # This code stresses the "libraries are programs" paradigm to its
+ # limits. Maybe even breaks it. We compile a program, linking it
+ # against the deplibs as a proxy for the library. Then we can check
+ # whether they linked in statically or dynamically with ldd.
+ $opt_dry_run || $RM conftest.c
+ cat > conftest.c <<EOF
+ int main() { return 0; }
+EOF
+ $opt_dry_run || $RM conftest
+ if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
+ ldd_output=`ldd conftest`
+ for i in $deplibs; do
+ case $i in
+ -l*)
+ func_stripname -l '' "$i"
+ name=$func_stripname_result
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $i "*)
+ func_append newdeplibs " $i"
+ i=""
+ ;;
+ esac
+ fi
+ if test -n "$i" ; then
+ libname=`eval "\\$ECHO \"$libname_spec\""`
+ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+ set dummy $deplib_matches; shift
+ deplib_match=$1
+ if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+ func_append newdeplibs " $i"
+ else
+ droppeddeps=yes
+ echo
+ $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+ echo "*** I have the capability to make that library automatically link in when"
+ echo "*** you link to this library. But I can only do this if you have a"
+ echo "*** shared version of the library, which I believe you do not have"
+ echo "*** because a test_compile did reveal that the linker did not use it for"
+ echo "*** its dynamic dependency list that programs get resolved with at runtime."
+ fi
+ fi
+ ;;
+ *)
+ func_append newdeplibs " $i"
+ ;;
+ esac
+ done
+ else
+ # Error occurred in the first compile. Let's try to salvage
+ # the situation: Compile a separate program for each library.
+ for i in $deplibs; do
+ case $i in
+ -l*)
+ func_stripname -l '' "$i"
+ name=$func_stripname_result
+ $opt_dry_run || $RM conftest
+ if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
+ ldd_output=`ldd conftest`
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $i "*)
+ func_append newdeplibs " $i"
+ i=""
+ ;;
+ esac
+ fi
+ if test -n "$i" ; then
+ libname=`eval "\\$ECHO \"$libname_spec\""`
+ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+ set dummy $deplib_matches; shift
+ deplib_match=$1
+ if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+ func_append newdeplibs " $i"
+ else
+ droppeddeps=yes
+ echo
+ $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+ echo "*** I have the capability to make that library automatically link in when"
+ echo "*** you link to this library. But I can only do this if you have a"
+ echo "*** shared version of the library, which you do not appear to have"
+ echo "*** because a test_compile did reveal that the linker did not use this one"
+ echo "*** as a dynamic dependency that programs can get resolved with at runtime."
+ fi
+ fi
+ else
+ droppeddeps=yes
+ echo
+ $ECHO "*** Warning! Library $i is needed by this library but I was not able to"
+ echo "*** make it link in! You will probably need to install it or some"
+ echo "*** library that it depends on before this library will be fully"
+ echo "*** functional. Installing it before continuing would be even better."
+ fi
+ ;;
+ *)
+ func_append newdeplibs " $i"
+ ;;
+ esac
+ done
+ fi
+ ;;
+ file_magic*)
+ set dummy $deplibs_check_method; shift
+ file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+ for a_deplib in $deplibs; do
+ case $a_deplib in
+ -l*)
+ func_stripname -l '' "$a_deplib"
+ name=$func_stripname_result
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $a_deplib "*)
+ func_append newdeplibs " $a_deplib"
+ a_deplib=""
+ ;;
+ esac
+ fi
+ if test -n "$a_deplib" ; then
+ libname=`eval "\\$ECHO \"$libname_spec\""`
+ if test -n "$file_magic_glob"; then
+ libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob`
+ else
+ libnameglob=$libname
+ fi
+ test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob`
+ for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+ if test "$want_nocaseglob" = yes; then
+ shopt -s nocaseglob
+ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+ $nocaseglob
+ else
+ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
+ fi
+ for potent_lib in $potential_libs; do
+ # Follow soft links.
+ if ls -lLd "$potent_lib" 2>/dev/null |
+ $GREP " -> " >/dev/null; then
+ continue
+ fi
+ # The statement above tries to avoid entering an
+ # endless loop below, in case of cyclic links.
+ # We might still enter an endless loop, since a link
+ # loop can be closed while we follow links,
+ # but so what?
+ potlib="$potent_lib"
+ while test -h "$potlib" 2>/dev/null; do
+ potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
+ case $potliblink in
+ [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
+ *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
+ esac
+ done
+ if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+ $SED -e 10q |
+ $EGREP "$file_magic_regex" > /dev/null; then
+ func_append newdeplibs " $a_deplib"
+ a_deplib=""
+ break 2
+ fi
+ done
+ done
+ fi
+ if test -n "$a_deplib" ; then
+ droppeddeps=yes
+ echo
+ $ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+ echo "*** I have the capability to make that library automatically link in when"
+ echo "*** you link to this library. But I can only do this if you have a"
+ echo "*** shared version of the library, which you do not appear to have"
+ echo "*** because I did check the linker path looking for a file starting"
+ if test -z "$potlib" ; then
+ $ECHO "*** with $libname but no candidates were found. (...for file magic test)"
+ else
+ $ECHO "*** with $libname and none of the candidates passed a file format test"
+ $ECHO "*** using a file magic. Last file checked: $potlib"
+ fi
+ fi
+ ;;
+ *)
+ # Add a -L argument.
+ func_append newdeplibs " $a_deplib"
+ ;;
+ esac
+ done # Gone through all deplibs.
+ ;;
+ match_pattern*)
+ set dummy $deplibs_check_method; shift
+ match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+ for a_deplib in $deplibs; do
+ case $a_deplib in
+ -l*)
+ func_stripname -l '' "$a_deplib"
+ name=$func_stripname_result
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ case " $predeps $postdeps " in
+ *" $a_deplib "*)
+ func_append newdeplibs " $a_deplib"
+ a_deplib=""
+ ;;
+ esac
+ fi
+ if test -n "$a_deplib" ; then
+ libname=`eval "\\$ECHO \"$libname_spec\""`
+ for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+ potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+ for potent_lib in $potential_libs; do
+ potlib="$potent_lib" # see symlink-check above in file_magic test
+ if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+ $EGREP "$match_pattern_regex" > /dev/null; then
+ func_append newdeplibs " $a_deplib"
+ a_deplib=""
+ break 2
+ fi
+ done
+ done
+ fi
+ if test -n "$a_deplib" ; then
+ droppeddeps=yes
+ echo
+ $ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+ echo "*** I have the capability to make that library automatically link in when"
+ echo "*** you link to this library. But I can only do this if you have a"
+ echo "*** shared version of the library, which you do not appear to have"
+ echo "*** because I did check the linker path looking for a file starting"
+ if test -z "$potlib" ; then
+ $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
+ else
+ $ECHO "*** with $libname and none of the candidates passed a file format test"
+ $ECHO "*** using a regex pattern. Last file checked: $potlib"
+ fi
+ fi
+ ;;
+ *)
+ # Add a -L argument.
+ func_append newdeplibs " $a_deplib"
+ ;;
+ esac
+ done # Gone through all deplibs.
+ ;;
+ none | unknown | *)
+ newdeplibs=""
+ tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
+ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+ for i in $predeps $postdeps ; do
+ # can't use Xsed below, because $i might contain '/'
+ tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"`
+ done
+ fi
+ case $tmp_deplibs in
+ *[!\ \ ]*)
+ echo
+ if test "X$deplibs_check_method" = "Xnone"; then
+	    echo "*** Warning: inter-library dependencies are not supported on this platform."
+ else
+ echo "*** Warning: inter-library dependencies are not known to be supported."
+ fi
+ echo "*** All declared inter-library dependencies are being dropped."
+ droppeddeps=yes
+ ;;
+ esac
+ ;;
+ esac
+ versuffix=$versuffix_save
+ major=$major_save
+ release=$release_save
+ libname=$libname_save
+ name=$name_save
+
+ case $host in
+ *-*-rhapsody* | *-*-darwin1.[012])
+ # On Rhapsody replace the C library with the System framework
+ newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
+ ;;
+ esac
+
+ if test "$droppeddeps" = yes; then
+ if test "$module" = yes; then
+ echo
+ echo "*** Warning: libtool could not satisfy all declared inter-library"
+ $ECHO "*** dependencies of module $libname. Therefore, libtool will create"
+ echo "*** a static module, that should work as long as the dlopening"
+ echo "*** application is linked with the -dlopen flag."
+ if test -z "$global_symbol_pipe"; then
+ echo
+ echo "*** However, this would only work if libtool was able to extract symbol"
+ echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+ echo "*** not find such a program. So, this module is probably useless."
+ echo "*** \`nm' from GNU binutils and a full rebuild may help."
+ fi
+ if test "$build_old_libs" = no; then
+ oldlibs="$output_objdir/$libname.$libext"
+ build_libtool_libs=module
+ build_old_libs=yes
+ else
+ build_libtool_libs=no
+ fi
+ else
+ echo "*** The inter-library dependencies that have been dropped here will be"
+ echo "*** automatically added whenever a program is linked with this library"
+ echo "*** or is declared to -dlopen it."
+
+ if test "$allow_undefined" = no; then
+ echo
+ echo "*** Since this library must not contain undefined symbols,"
+ echo "*** because either the platform does not support them or"
+ echo "*** it was explicitly requested with -no-undefined,"
+ echo "*** libtool will only create a static version of it."
+ if test "$build_old_libs" = no; then
+ oldlibs="$output_objdir/$libname.$libext"
+ build_libtool_libs=module
+ build_old_libs=yes
+ else
+ build_libtool_libs=no
+ fi
+ fi
+ fi
+ fi
+ # Done checking deplibs!
+ deplibs=$newdeplibs
+ fi
+ # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+ case $host in
+ *-*-darwin*)
+ newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ ;;
+ esac
+
+ # move library search paths that coincide with paths to not yet
+ # installed libraries to the beginning of the library search list
+ new_libs=
+ for path in $notinst_path; do
+ case " $new_libs " in
+ *" -L$path/$objdir "*) ;;
+ *)
+ case " $deplibs " in
+ *" -L$path/$objdir "*)
+ func_append new_libs " -L$path/$objdir" ;;
+ esac
+ ;;
+ esac
+ done
+ for deplib in $deplibs; do
+ case $deplib in
+ -L*)
+ case " $new_libs " in
+ *" $deplib "*) ;;
+ *) func_append new_libs " $deplib" ;;
+ esac
+ ;;
+ *) func_append new_libs " $deplib" ;;
+ esac
+ done
+ deplibs="$new_libs"
+
+ # All the library-specific variables (install_libdir is set above).
+ library_names=
+ old_library=
+ dlname=
+
+ # Test again, we may have decided not to build it any more
+ if test "$build_libtool_libs" = yes; then
+ if test "$hardcode_into_libs" = yes; then
+ # Hardcode the library paths
+ hardcode_libdirs=
+ dep_rpath=
+ rpath="$finalize_rpath"
+ test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
+ for libdir in $rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
+ if test -n "$hardcode_libdir_separator"; then
+ func_replace_sysroot "$libdir"
+ libdir=$func_replace_sysroot_result
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs="$libdir"
+ else
+ # Just accumulate the unique libdirs.
+ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+ eval flag=\"$hardcode_libdir_flag_spec\"
+ func_append dep_rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$perm_rpath " in
+ *" $libdir "*) ;;
+	      *) func_append perm_rpath " $libdir" ;;
+ esac
+ fi
+ done
+ # Substitute the hardcoded libdirs into the rpath.
+ if test -n "$hardcode_libdir_separator" &&
+ test -n "$hardcode_libdirs"; then
+ libdir="$hardcode_libdirs"
+ if test -n "$hardcode_libdir_flag_spec_ld"; then
+ eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\"
+ else
+ eval dep_rpath=\"$hardcode_libdir_flag_spec\"
+ fi
+ fi
+ if test -n "$runpath_var" && test -n "$perm_rpath"; then
+ # We should set the runpath_var.
+ rpath=
+ for dir in $perm_rpath; do
+ func_append rpath "$dir:"
+ done
+ eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+ fi
+ test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+ fi
+
+ shlibpath="$finalize_shlibpath"
+ test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
+ if test -n "$shlibpath"; then
+ eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+ fi
+
+ # Get the real and link names of the library.
+ eval shared_ext=\"$shrext_cmds\"
+ eval library_names=\"$library_names_spec\"
+ set dummy $library_names
+ shift
+ realname="$1"
+ shift
+
+ if test -n "$soname_spec"; then
+ eval soname=\"$soname_spec\"
+ else
+ soname="$realname"
+ fi
+ if test -z "$dlname"; then
+ dlname=$soname
+ fi
+
+ lib="$output_objdir/$realname"
+ linknames=
+ for link
+ do
+ func_append linknames " $link"
+ done
+
+ # Use standard objects if they are pic
+ test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ test "X$libobjs" = "X " && libobjs=
+
+ delfiles=
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+ export_symbols="$output_objdir/$libname.uexp"
+ func_append delfiles " $export_symbols"
+ fi
+
+ orig_export_symbols=
+ case $host_os in
+ cygwin* | mingw* | cegcc*)
+ if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
+ # exporting using user supplied symfile
+ if test "x`$SED 1q $export_symbols`" != xEXPORTS; then
+ # and it's NOT already a .def file. Must figure out
+ # which of the given symbols are data symbols and tag
+ # them as such. So, trigger use of export_symbols_cmds.
+ # export_symbols gets reassigned inside the "prepare
+ # the list of exported symbols" if statement, so the
+ # include_expsyms logic still works.
+ orig_export_symbols="$export_symbols"
+ export_symbols=
+ always_export_symbols=yes
+ fi
+ fi
+ ;;
+ esac
+
+ # Prepare the list of exported symbols
+ if test -z "$export_symbols"; then
+ if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
+ func_verbose "generating symbol list for \`$libname.la'"
+ export_symbols="$output_objdir/$libname.exp"
+ $opt_dry_run || $RM $export_symbols
+ cmds=$export_symbols_cmds
+ save_ifs="$IFS"; IFS='~'
+ for cmd1 in $cmds; do
+ IFS="$save_ifs"
+ # Take the normal branch if the nm_file_list_spec branch
+ # doesn't work or if tool conversion is not needed.
+ case $nm_file_list_spec~$to_tool_file_cmd in
+ *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
+ try_normal_branch=yes
+ eval cmd=\"$cmd1\"
+ func_len " $cmd"
+ len=$func_len_result
+ ;;
+ *)
+ try_normal_branch=no
+ ;;
+ esac
+ if test "$try_normal_branch" = yes \
+ && { test "$len" -lt "$max_cmd_len" \
+ || test "$max_cmd_len" -le -1; }
+ then
+ func_show_eval "$cmd" 'exit $?'
+ skipped_export=false
+ elif test -n "$nm_file_list_spec"; then
+ func_basename "$output"
+ output_la=$func_basename_result
+ save_libobjs=$libobjs
+ save_output=$output
+ output=${output_objdir}/${output_la}.nm
+ func_to_tool_file "$output"
+ libobjs=$nm_file_list_spec$func_to_tool_file_result
+ func_append delfiles " $output"
+ func_verbose "creating $NM input file list: $output"
+ for obj in $save_libobjs; do
+ func_to_tool_file "$obj"
+ $ECHO "$func_to_tool_file_result"
+ done > "$output"
+ eval cmd=\"$cmd1\"
+ func_show_eval "$cmd" 'exit $?'
+ output=$save_output
+ libobjs=$save_libobjs
+ skipped_export=false
+ else
+ # The command line is too long to execute in one step.
+ func_verbose "using reloadable object file for export list..."
+ skipped_export=:
+ # Break out early, otherwise skipped_export may be
+ # set to false by a later but shorter cmd.
+ break
+ fi
+ done
+ IFS="$save_ifs"
+ if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then
+ func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+ func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+ fi
+ fi
+ fi
+
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ tmp_export_symbols="$export_symbols"
+ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+ fi
+
+ if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
+ # The given exports_symbols file has to be filtered, so filter it.
+ func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+ # FIXME: $output_objdir/$libname.filter potentially contains lots of
+ # 's' commands which not all seds can handle. GNU sed should be fine
+ # though. Also, the filter scales superlinearly with the number of
+ # global variables. join(1) would be nice here, but unfortunately
+ # isn't a blessed tool.
+ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+ func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+ export_symbols=$output_objdir/$libname.def
+ $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+ fi
+
+ tmp_deplibs=
+ for test_deplib in $deplibs; do
+ case " $convenience " in
+ *" $test_deplib "*) ;;
+ *)
+ func_append tmp_deplibs " $test_deplib"
+ ;;
+ esac
+ done
+ deplibs="$tmp_deplibs"
+
+ if test -n "$convenience"; then
+ if test -n "$whole_archive_flag_spec" &&
+ test "$compiler_needs_object" = yes &&
+ test -z "$libobjs"; then
+ # extract the archives, so we have objects to list.
+ # TODO: could optimize this to just extract one archive.
+ whole_archive_flag_spec=
+ fi
+ if test -n "$whole_archive_flag_spec"; then
+ save_libobjs=$libobjs
+ eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+ test "X$libobjs" = "X " && libobjs=
+ else
+ gentop="$output_objdir/${outputname}x"
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $convenience
+ func_append libobjs " $func_extract_archives_result"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+ fi
+
+ if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
+ eval flag=\"$thread_safe_flag_spec\"
+ func_append linker_flags " $flag"
+ fi
+
+ # Make a backup of the uninstalled library when relinking
+ if test "$opt_mode" = relink; then
+ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+ fi
+
+ # Do each of the archive commands.
+ if test "$module" = yes && test -n "$module_cmds" ; then
+ if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+ eval test_cmds=\"$module_expsym_cmds\"
+ cmds=$module_expsym_cmds
+ else
+ eval test_cmds=\"$module_cmds\"
+ cmds=$module_cmds
+ fi
+ else
+ if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+ eval test_cmds=\"$archive_expsym_cmds\"
+ cmds=$archive_expsym_cmds
+ else
+ eval test_cmds=\"$archive_cmds\"
+ cmds=$archive_cmds
+ fi
+ fi
+
+ if test "X$skipped_export" != "X:" &&
+ func_len " $test_cmds" &&
+ len=$func_len_result &&
+ test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+ :
+ else
+ # The command line is too long to link in one step, link piecewise
+ # or, if using GNU ld and skipped_export is not :, use a linker
+ # script.
+
+ # Save the value of $output and $libobjs because we want to
+ # use them later. If we have whole_archive_flag_spec, we
+ # want to use save_libobjs as it was before
+ # whole_archive_flag_spec was expanded, because we can't
+ # assume the linker understands whole_archive_flag_spec.
+ # This may have to be revisited, in case too many
+ # convenience libraries get linked in and end up exceeding
+ # the spec.
+ if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
+ save_libobjs=$libobjs
+ fi
+ save_output=$output
+ func_basename "$output"
+ output_la=$func_basename_result
+
+ # Clear the reloadable object creation command queue and
+ # initialize k to one.
+ test_cmds=
+ concat_cmds=
+ objlist=
+ last_robj=
+ k=1
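+	  # In the piecewise branch below, objects are batched into
+	  # reloadable files named $output_la-$k.$objext; each file after
+	  # the first also links in its predecessor ($last_robj), so the
+	  # final piece carries the whole chain into the library link.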
+
+ if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then
+ output=${output_objdir}/${output_la}.lnkscript
+ func_verbose "creating GNU ld script: $output"
+ echo 'INPUT (' > $output
+ for obj in $save_libobjs
+ do
+ func_to_tool_file "$obj"
+ $ECHO "$func_to_tool_file_result" >> $output
+ done
+ echo ')' >> $output
+ func_append delfiles " $output"
+ func_to_tool_file "$output"
+ output=$func_to_tool_file_result
+ elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
+ output=${output_objdir}/${output_la}.lnk
+ func_verbose "creating linker input file list: $output"
+ : > $output
+ set x $save_libobjs
+ shift
+ firstobj=
+ if test "$compiler_needs_object" = yes; then
+ firstobj="$1 "
+ shift
+ fi
+ for obj
+ do
+ func_to_tool_file "$obj"
+ $ECHO "$func_to_tool_file_result" >> $output
+ done
+ func_append delfiles " $output"
+ func_to_tool_file "$output"
+ output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
+ else
+ if test -n "$save_libobjs"; then
+ func_verbose "creating reloadable object files..."
+ output=$output_objdir/$output_la-${k}.$objext
+ eval test_cmds=\"$reload_cmds\"
+ func_len " $test_cmds"
+ len0=$func_len_result
+ len=$len0
+
+ # Loop over the list of objects to be linked.
+ for obj in $save_libobjs
+ do
+ func_len " $obj"
+ func_arith $len + $func_len_result
+ len=$func_arith_result
+ if test "X$objlist" = X ||
+ test "$len" -lt "$max_cmd_len"; then
+ func_append objlist " $obj"
+ else
+ # The command $test_cmds is almost too long, add a
+ # command to the queue.
+ if test "$k" -eq 1 ; then
+ # The first file doesn't have a previous command to add.
+ reload_objs=$objlist
+ eval concat_cmds=\"$reload_cmds\"
+ else
+ # All subsequent reloadable object files will link in
+ # the last one created.
+ reload_objs="$objlist $last_robj"
+ eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+ fi
+ last_robj=$output_objdir/$output_la-${k}.$objext
+ func_arith $k + 1
+ k=$func_arith_result
+ output=$output_objdir/$output_la-${k}.$objext
+ objlist=" $obj"
+ func_len " $last_robj"
+ func_arith $len0 + $func_len_result
+ len=$func_arith_result
+ fi
+ done
+ # Handle the remaining objects by creating one last
+ # reloadable object file. All subsequent reloadable object
+ # files will link in the last one created.
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+ reload_objs="$objlist $last_robj"
+ eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
+ if test -n "$last_robj"; then
+ eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
+ fi
+ func_append delfiles " $output"
+
+ else
+ output=
+ fi
+
+ if ${skipped_export-false}; then
+ func_verbose "generating symbol list for \`$libname.la'"
+ export_symbols="$output_objdir/$libname.exp"
+ $opt_dry_run || $RM $export_symbols
+ libobjs=$output
+ # Append the command to create the export file.
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+ eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+ if test -n "$last_robj"; then
+ eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+ fi
+ fi
+
+ test -n "$save_libobjs" &&
+ func_verbose "creating a temporary reloadable object file: $output"
+
+ # Loop through the commands generated above and execute them.
+ save_ifs="$IFS"; IFS='~'
+ for cmd in $concat_cmds; do
+ IFS="$save_ifs"
+ $opt_silent || {
+ func_quote_for_expand "$cmd"
+ eval "func_echo $func_quote_for_expand_result"
+ }
+ $opt_dry_run || eval "$cmd" || {
+ lt_exit=$?
+
+ # Restore the uninstalled library and exit
+ if test "$opt_mode" = relink; then
+ ( cd "$output_objdir" && \
+ $RM "${realname}T" && \
+ $MV "${realname}U" "$realname" )
+ fi
+
+ exit $lt_exit
+ }
+ done
+ IFS="$save_ifs"
+
+ if test -n "$export_symbols_regex" && ${skipped_export-false}; then
+ func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+ func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+ fi
+ fi
+
+ if ${skipped_export-false}; then
+ if test -n "$export_symbols" && test -n "$include_expsyms"; then
+ tmp_export_symbols="$export_symbols"
+ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+ fi
+
+ if test -n "$orig_export_symbols"; then
+ # The given exports_symbols file has to be filtered, so filter it.
+ func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+ # FIXME: $output_objdir/$libname.filter potentially contains lots of
+ # 's' commands which not all seds can handle. GNU sed should be fine
+ # though. Also, the filter scales superlinearly with the number of
+ # global variables. join(1) would be nice here, but unfortunately
+ # isn't a blessed tool.
+ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+ func_append delfiles " $export_symbols $output_objdir/$libname.filter"
+ export_symbols=$output_objdir/$libname.def
+ $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+ fi
+ fi
+
+ libobjs=$output
+ # Restore the value of output.
+ output=$save_output
+
+ if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+ eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+ # Expand the library linking commands again to reset the
+ # value of $libobjs for piecewise linking.
+
+ # Do each of the archive commands.
+ if test "$module" = yes && test -n "$module_cmds" ; then
+ if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+ cmds=$module_expsym_cmds
+ else
+ cmds=$module_cmds
+ fi
+ else
+ if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+ cmds=$archive_expsym_cmds
+ else
+ cmds=$archive_cmds
+ fi
+ fi
+ fi
+
+ if test -n "$delfiles"; then
+ # Append the command to remove temporary files to $cmds.
+ eval cmds=\"\$cmds~\$RM $delfiles\"
+ fi
+
+ # Add any objects from preloaded convenience libraries
+ if test -n "$dlprefiles"; then
+ gentop="$output_objdir/${outputname}x"
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $dlprefiles
+ func_append libobjs " $func_extract_archives_result"
+ test "X$libobjs" = "X " && libobjs=
+ fi
+
+ save_ifs="$IFS"; IFS='~'
+ for cmd in $cmds; do
+ IFS="$save_ifs"
+ eval cmd=\"$cmd\"
+ $opt_silent || {
+ func_quote_for_expand "$cmd"
+ eval "func_echo $func_quote_for_expand_result"
+ }
+ $opt_dry_run || eval "$cmd" || {
+ lt_exit=$?
+
+ # Restore the uninstalled library and exit
+ if test "$opt_mode" = relink; then
+ ( cd "$output_objdir" && \
+ $RM "${realname}T" && \
+ $MV "${realname}U" "$realname" )
+ fi
+
+ exit $lt_exit
+ }
+ done
+ IFS="$save_ifs"
+
+ # Restore the uninstalled library and exit
+ if test "$opt_mode" = relink; then
+ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+ if test -n "$convenience"; then
+ if test -z "$whole_archive_flag_spec"; then
+ func_show_eval '${RM}r "$gentop"'
+ fi
+ fi
+
+ exit $EXIT_SUCCESS
+ fi
+
+ # Create links to the real library.
+ for linkname in $linknames; do
+ if test "$realname" != "$linkname"; then
+ func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
+ fi
+ done
+
+ # If -module or -export-dynamic was specified, set the dlname.
+ if test "$module" = yes || test "$export_dynamic" = yes; then
+ # On all known operating systems, these are identical.
+ dlname="$soname"
+ fi
+ fi
+ ;;
+
+ obj)
+ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+ func_warning "\`-dlopen' is ignored for objects"
+ fi
+
+ case " $deplibs" in
+ *\ -l* | *\ -L*)
+ func_warning "\`-l' and \`-L' are ignored for objects" ;;
+ esac
+
+ test -n "$rpath" && \
+ func_warning "\`-rpath' is ignored for objects"
+
+ test -n "$xrpath" && \
+ func_warning "\`-R' is ignored for objects"
+
+ test -n "$vinfo" && \
+ func_warning "\`-version-info' is ignored for objects"
+
+ test -n "$release" && \
+ func_warning "\`-release' is ignored for objects"
+
+ case $output in
+ *.lo)
+ test -n "$objs$old_deplibs" && \
+ func_fatal_error "cannot build library object \`$output' from non-libtool objects"
+
+ libobj=$output
+ func_lo2o "$libobj"
+ obj=$func_lo2o_result
+ ;;
+ *)
+ libobj=
+ obj="$output"
+ ;;
+ esac
+
+ # Delete the old objects.
+ $opt_dry_run || $RM $obj $libobj
+
+ # Objects from convenience libraries. This assumes
+ # single-version convenience libraries. Whenever we create
+	# different ones for PIC/non-PIC, we'll have to duplicate
+ # the extraction.
+ reload_conv_objs=
+ gentop=
+ # reload_cmds runs $LD directly, so let us get rid of
+ # -Wl from whole_archive_flag_spec and hope we can get by with
+ # turning comma into space..
+ wl=
+
+ if test -n "$convenience"; then
+ if test -n "$whole_archive_flag_spec"; then
+ eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+ reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+ else
+ gentop="$output_objdir/${obj}x"
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $convenience
+ reload_conv_objs="$reload_objs $func_extract_archives_result"
+ fi
+ fi
+
+ # If we're not building shared, we need to use non_pic_objs
+ test "$build_libtool_libs" != yes && libobjs="$non_pic_objects"
+
+ # Create the old-style object.
+ reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
+
+ output="$obj"
+ func_execute_cmds "$reload_cmds" 'exit $?'
+
+ # Exit if we aren't doing a library object file.
+ if test -z "$libobj"; then
+ if test -n "$gentop"; then
+ func_show_eval '${RM}r "$gentop"'
+ fi
+
+ exit $EXIT_SUCCESS
+ fi
+
+ if test "$build_libtool_libs" != yes; then
+ if test -n "$gentop"; then
+ func_show_eval '${RM}r "$gentop"'
+ fi
+
+ # Create an invalid libtool object if no PIC, so that we don't
+ # accidentally link it into a program.
+ # $show "echo timestamp > $libobj"
+ # $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+ exit $EXIT_SUCCESS
+ fi
+
+ if test -n "$pic_flag" || test "$pic_mode" != default; then
+ # Only do commands if we really have different PIC objects.
+ reload_objs="$libobjs $reload_conv_objs"
+ output="$libobj"
+ func_execute_cmds "$reload_cmds" 'exit $?'
+ fi
+
+ if test -n "$gentop"; then
+ func_show_eval '${RM}r "$gentop"'
+ fi
+
+ exit $EXIT_SUCCESS
+ ;;
+
+ prog)
+ case $host in
+ *cygwin*) func_stripname '' '.exe' "$output"
+ output=$func_stripname_result.exe;;
+ esac
+ test -n "$vinfo" && \
+ func_warning "\`-version-info' is ignored for programs"
+
+ test -n "$release" && \
+ func_warning "\`-release' is ignored for programs"
+
+ test "$preload" = yes \
+ && test "$dlopen_support" = unknown \
+ && test "$dlopen_self" = unknown \
+ && test "$dlopen_self_static" = unknown && \
+ func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support."
+
+ case $host in
+ *-*-rhapsody* | *-*-darwin1.[012])
+	# On Rhapsody, replace the C library with the System framework
+ compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
+ finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
+ ;;
+ esac
+
+ case $host in
+ *-*-darwin*)
+	  # Don't allow lazy linking; it breaks C++ global constructors,
+	  # but this is supposedly fixed on 10.4 or later (yay!).
+ if test "$tagname" = CXX ; then
+ case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+ 10.[0123])
+ func_append compile_command " ${wl}-bind_at_load"
+ func_append finalize_command " ${wl}-bind_at_load"
+ ;;
+ esac
+ fi
+ # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+ compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+ ;;
+ esac
+
+
+      # Move library search paths that coincide with paths to not-yet-installed
+      # libraries to the beginning of the library search list.
+ new_libs=
+ for path in $notinst_path; do
+ case " $new_libs " in
+ *" -L$path/$objdir "*) ;;
+ *)
+ case " $compile_deplibs " in
+ *" -L$path/$objdir "*)
+ func_append new_libs " -L$path/$objdir" ;;
+ esac
+ ;;
+ esac
+ done
+ for deplib in $compile_deplibs; do
+ case $deplib in
+ -L*)
+ case " $new_libs " in
+ *" $deplib "*) ;;
+ *) func_append new_libs " $deplib" ;;
+ esac
+ ;;
+ *) func_append new_libs " $deplib" ;;
+ esac
+ done
+ compile_deplibs="$new_libs"
+
+
+ func_append compile_command " $compile_deplibs"
+ func_append finalize_command " $finalize_deplibs"
+
+ if test -n "$rpath$xrpath"; then
+ # If the user specified any rpath flags, then add them.
+ for libdir in $rpath $xrpath; do
+ # This is the magic to use -rpath.
+ case "$finalize_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_rpath " $libdir" ;;
+ esac
+ done
+ fi
+
+ # Now hardcode the library paths
+ rpath=
+ hardcode_libdirs=
+ for libdir in $compile_rpath $finalize_rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
+ if test -n "$hardcode_libdir_separator"; then
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs="$libdir"
+ else
+ # Just accumulate the unique libdirs.
+ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+ eval flag=\"$hardcode_libdir_flag_spec\"
+ func_append rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$perm_rpath " in
+ *" $libdir "*) ;;
+ *) func_append perm_rpath " $libdir" ;;
+ esac
+ fi
+ case $host in
+ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+ testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'`
+ case :$dllsearchpath: in
+ *":$libdir:"*) ;;
+ ::) dllsearchpath=$libdir;;
+ *) func_append dllsearchpath ":$libdir";;
+ esac
+ case :$dllsearchpath: in
+ *":$testbindir:"*) ;;
+ ::) dllsearchpath=$testbindir;;
+ *) func_append dllsearchpath ":$testbindir";;
+ esac
+ ;;
+ esac
+ done
+ # Substitute the hardcoded libdirs into the rpath.
+ if test -n "$hardcode_libdir_separator" &&
+ test -n "$hardcode_libdirs"; then
+ libdir="$hardcode_libdirs"
+ eval rpath=\" $hardcode_libdir_flag_spec\"
+ fi
+ compile_rpath="$rpath"
+
+ rpath=
+ hardcode_libdirs=
+ for libdir in $finalize_rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
+ if test -n "$hardcode_libdir_separator"; then
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs="$libdir"
+ else
+ # Just accumulate the unique libdirs.
+ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+ ;;
+ *)
+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
+ ;;
+ esac
+ fi
+ else
+ eval flag=\"$hardcode_libdir_flag_spec\"
+ func_append rpath " $flag"
+ fi
+ elif test -n "$runpath_var"; then
+ case "$finalize_perm_rpath " in
+ *" $libdir "*) ;;
+ *) func_append finalize_perm_rpath " $libdir" ;;
+ esac
+ fi
+ done
+ # Substitute the hardcoded libdirs into the rpath.
+ if test -n "$hardcode_libdir_separator" &&
+ test -n "$hardcode_libdirs"; then
+ libdir="$hardcode_libdirs"
+ eval rpath=\" $hardcode_libdir_flag_spec\"
+ fi
+ finalize_rpath="$rpath"
+
+ if test -n "$libobjs" && test "$build_old_libs" = yes; then
+ # Transform all the library objects into standard objects.
+ compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+ fi
+
+ func_generate_dlsyms "$outputname" "@PROGRAM@" "no"
+
+ # template prelinking step
+ if test -n "$prelink_cmds"; then
+ func_execute_cmds "$prelink_cmds" 'exit $?'
+ fi
+
+ wrappers_required=yes
+ case $host in
+ *cegcc* | *mingw32ce*)
+        # Disable wrappers for cegcc and mingw32ce hosts; we are cross-compiling anyway.
+ wrappers_required=no
+ ;;
+ *cygwin* | *mingw* )
+ if test "$build_libtool_libs" != yes; then
+ wrappers_required=no
+ fi
+ ;;
+ *)
+ if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
+ wrappers_required=no
+ fi
+ ;;
+ esac
+ if test "$wrappers_required" = no; then
+ # Replace the output file specification.
+ compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+ link_command="$compile_command$compile_rpath"
+
+ # We have no uninstalled library dependencies, so finalize right now.
+ exit_status=0
+ func_show_eval "$link_command" 'exit_status=$?'
+
+ if test -n "$postlink_cmds"; then
+ func_to_tool_file "$output"
+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+ func_execute_cmds "$postlink_cmds" 'exit $?'
+ fi
+
+ # Delete the generated files.
+ if test -f "$output_objdir/${outputname}S.${objext}"; then
+ func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
+ fi
+
+ exit $exit_status
+ fi
+
+ if test -n "$compile_shlibpath$finalize_shlibpath"; then
+ compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
+ fi
+ if test -n "$finalize_shlibpath"; then
+ finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
+ fi
+
+ compile_var=
+ finalize_var=
+ if test -n "$runpath_var"; then
+ if test -n "$perm_rpath"; then
+ # We should set the runpath_var.
+ rpath=
+ for dir in $perm_rpath; do
+ func_append rpath "$dir:"
+ done
+ compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+ fi
+ if test -n "$finalize_perm_rpath"; then
+ # We should set the runpath_var.
+ rpath=
+ for dir in $finalize_perm_rpath; do
+ func_append rpath "$dir:"
+ done
+ finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+ fi
+ fi
+
+ if test "$no_install" = yes; then
+ # We don't need to create a wrapper script.
+ link_command="$compile_var$compile_command$compile_rpath"
+ # Replace the output file specification.
+ link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+ # Delete the old output file.
+ $opt_dry_run || $RM $output
+ # Link the executable and exit
+ func_show_eval "$link_command" 'exit $?'
+
+ if test -n "$postlink_cmds"; then
+ func_to_tool_file "$output"
+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+ func_execute_cmds "$postlink_cmds" 'exit $?'
+ fi
+
+ exit $EXIT_SUCCESS
+ fi
+
+ if test "$hardcode_action" = relink; then
+ # Fast installation is not supported
+ link_command="$compile_var$compile_command$compile_rpath"
+ relink_command="$finalize_var$finalize_command$finalize_rpath"
+
+ func_warning "this platform does not like uninstalled shared libraries"
+ func_warning "\`$output' will be relinked during installation"
+ else
+ if test "$fast_install" != no; then
+ link_command="$finalize_var$compile_command$finalize_rpath"
+ if test "$fast_install" = yes; then
+ relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
+ else
+ # fast_install is set to needless
+ relink_command=
+ fi
+ else
+ link_command="$compile_var$compile_command$compile_rpath"
+ relink_command="$finalize_var$finalize_command$finalize_rpath"
+ fi
+ fi
+
+ # Replace the output file specification.
+ link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
+
+ # Delete the old output files.
+ $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
+
+ func_show_eval "$link_command" 'exit $?'
+
+ if test -n "$postlink_cmds"; then
+ func_to_tool_file "$output_objdir/$outputname"
+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
+ func_execute_cmds "$postlink_cmds" 'exit $?'
+ fi
+
+ # Now create the wrapper script.
+ func_verbose "creating $output"
+
+ # Quote the relink command for shipping.
+ if test -n "$relink_command"; then
+ # Preserve any variables that may affect compiler behavior
+ for var in $variables_saved_for_relink; do
+ if eval test -z \"\${$var+set}\"; then
+ relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+ elif eval var_value=\$$var; test -z "$var_value"; then
+ relink_command="$var=; export $var; $relink_command"
+ else
+ func_quote_for_eval "$var_value"
+ relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+ fi
+ done
+ relink_command="(cd `pwd`; $relink_command)"
+ relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+ fi
+
+ # Only actually do things if not in dry run mode.
+ $opt_dry_run || {
+ # win32 will think the script is a binary if it has
+ # a .exe suffix, so we strip it off here.
+ case $output in
+ *.exe) func_stripname '' '.exe' "$output"
+ output=$func_stripname_result ;;
+ esac
+ # test for cygwin because mv fails w/o .exe extensions
+ case $host in
+ *cygwin*)
+ exeext=.exe
+ func_stripname '' '.exe' "$outputname"
+ outputname=$func_stripname_result ;;
+ *) exeext= ;;
+ esac
+ case $host in
+ *cygwin* | *mingw* )
+ func_dirname_and_basename "$output" "" "."
+ output_name=$func_basename_result
+ output_path=$func_dirname_result
+ cwrappersource="$output_path/$objdir/lt-$output_name.c"
+ cwrapper="$output_path/$output_name.exe"
+ $RM $cwrappersource $cwrapper
+ trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
+
+ func_emit_cwrapperexe_src > $cwrappersource
+
+ # The wrapper executable is built using the $host compiler,
+ # because it contains $host paths and files. If cross-
+ # compiling, it, like the target executable, must be
+ # executed on the $host or under an emulation environment.
+ $opt_dry_run || {
+ $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
+ $STRIP $cwrapper
+ }
+
+ # Now, create the wrapper script for func_source use:
+ func_ltwrapper_scriptname $cwrapper
+ $RM $func_ltwrapper_scriptname_result
+ trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
+ $opt_dry_run || {
+ # note: this script will not be executed, so do not chmod.
+ if test "x$build" = "x$host" ; then
+ $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
+ else
+ func_emit_wrapper no > $func_ltwrapper_scriptname_result
+ fi
+ }
+ ;;
+ * )
+ $RM $output
+ trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
+
+ func_emit_wrapper no > $output
+ chmod +x $output
+ ;;
+ esac
+ }
+ exit $EXIT_SUCCESS
+ ;;
+ esac
+
+ # See if we need to build an old-fashioned archive.
+ for oldlib in $oldlibs; do
+
+ if test "$build_libtool_libs" = convenience; then
+ oldobjs="$libobjs_save $symfileobj"
+ addlibs="$convenience"
+ build_libtool_libs=no
+ else
+ if test "$build_libtool_libs" = module; then
+ oldobjs="$libobjs_save"
+ build_libtool_libs=no
+ else
+ oldobjs="$old_deplibs $non_pic_objects"
+ if test "$preload" = yes && test -f "$symfileobj"; then
+ func_append oldobjs " $symfileobj"
+ fi
+ fi
+ addlibs="$old_convenience"
+ fi
+
+ if test -n "$addlibs"; then
+ gentop="$output_objdir/${outputname}x"
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $addlibs
+ func_append oldobjs " $func_extract_archives_result"
+ fi
+
+ # Do each command in the archive commands.
+ if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
+ cmds=$old_archive_from_new_cmds
+ else
+
+ # Add any objects from preloaded convenience libraries
+ if test -n "$dlprefiles"; then
+ gentop="$output_objdir/${outputname}x"
+ func_append generated " $gentop"
+
+ func_extract_archives $gentop $dlprefiles
+ func_append oldobjs " $func_extract_archives_result"
+ fi
+
+ # POSIX demands no paths to be encoded in archives. We have
+ # to avoid creating archives with duplicate basenames if we
+ # might have to extract them afterwards, e.g., when creating a
+ # static archive out of a convenience library, or when linking
+ # the entirety of a libtool archive into another (currently
+ # not supported by libtool).
+ if (for obj in $oldobjs
+ do
+ func_basename "$obj"
+ $ECHO "$func_basename_result"
+ done | sort | sort -uc >/dev/null 2>&1); then
+ :
+ else
+ echo "copying selected object files to avoid basename conflicts..."
+ gentop="$output_objdir/${outputname}x"
+ func_append generated " $gentop"
+ func_mkdir_p "$gentop"
+ save_oldobjs=$oldobjs
+ oldobjs=
+ counter=1
+ for obj in $save_oldobjs
+ do
+ func_basename "$obj"
+ objbase="$func_basename_result"
+ case " $oldobjs " in
+ " ") oldobjs=$obj ;;
+ *[\ /]"$objbase "*)
+ while :; do
+ # Make sure we don't pick an alternate name that also
+ # overlaps.
+ newobj=lt$counter-$objbase
+ func_arith $counter + 1
+ counter=$func_arith_result
+ case " $oldobjs " in
+ *[\ /]"$newobj "*) ;;
+ *) if test ! -f "$gentop/$newobj"; then break; fi ;;
+ esac
+ done
+ func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+ func_append oldobjs " $gentop/$newobj"
+ ;;
+ *) func_append oldobjs " $obj" ;;
+ esac
+ done
+ fi
+ eval cmds=\"$old_archive_cmds\"
+
+ func_len " $cmds"
+ len=$func_len_result
+ if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+ cmds=$old_archive_cmds
+ elif test -n "$archiver_list_spec"; then
+ func_verbose "using command file archive linking..."
+ for obj in $oldobjs
+ do
+ func_to_tool_file "$obj"
+ $ECHO "$func_to_tool_file_result"
+ done > $output_objdir/$libname.libcmd
+ func_to_tool_file "$output_objdir/$libname.libcmd"
+ oldobjs=" $archiver_list_spec$func_to_tool_file_result"
+ cmds=$old_archive_cmds
+ else
+ # the command line is too long to link in one step, link in parts
+ func_verbose "using piecewise archive linking..."
+ save_RANLIB=$RANLIB
+ RANLIB=:
+ objlist=
+ concat_cmds=
+ save_oldobjs=$oldobjs
+ oldobjs=
+ # Is there a better way of finding the last object in the list?
+ for obj in $save_oldobjs
+ do
+ last_oldobj=$obj
+ done
+ eval test_cmds=\"$old_archive_cmds\"
+ func_len " $test_cmds"
+ len0=$func_len_result
+ len=$len0
+ for obj in $save_oldobjs
+ do
+ func_len " $obj"
+ func_arith $len + $func_len_result
+ len=$func_arith_result
+ func_append objlist " $obj"
+ if test "$len" -lt "$max_cmd_len"; then
+ :
+ else
+ # the above command should be used before it gets too long
+ oldobjs=$objlist
+ if test "$obj" = "$last_oldobj" ; then
+ RANLIB=$save_RANLIB
+ fi
+ test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
+ objlist=
+ len=$len0
+ fi
+ done
+ RANLIB=$save_RANLIB
+ oldobjs=$objlist
+ if test "X$oldobjs" = "X" ; then
+ eval cmds=\"\$concat_cmds\"
+ else
+ eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+ fi
+ fi
+ fi
+ func_execute_cmds "$cmds" 'exit $?'
+ done
+
+ test -n "$generated" && \
+ func_show_eval "${RM}r$generated"
+
+ # Now create the libtool archive.
+ case $output in
+ *.la)
+ old_library=
+ test "$build_old_libs" = yes && old_library="$libname.$libext"
+ func_verbose "creating $output"
+
+ # Preserve any variables that may affect compiler behavior
+ for var in $variables_saved_for_relink; do
+ if eval test -z \"\${$var+set}\"; then
+ relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+ elif eval var_value=\$$var; test -z "$var_value"; then
+ relink_command="$var=; export $var; $relink_command"
+ else
+ func_quote_for_eval "$var_value"
+ relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+ fi
+ done
+ # Quote the link command for shipping.
+ relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
+ relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+ if test "$hardcode_automatic" = yes ; then
+ relink_command=
+ fi
+
+ # Only create the output if not a dry run.
+ $opt_dry_run || {
+ for installed in no yes; do
+ if test "$installed" = yes; then
+ if test -z "$install_libdir"; then
+ break
+ fi
+ output="$output_objdir/$outputname"i
+ # Replace all uninstalled libtool libraries with the installed ones
+ newdependency_libs=
+ for deplib in $dependency_libs; do
+ case $deplib in
+ *.la)
+ func_basename "$deplib"
+ name="$func_basename_result"
+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+ test -z "$libdir" && \
+ func_fatal_error "\`$deplib' is not a valid libtool archive"
+ func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name"
+ ;;
+ -L*)
+ func_stripname -L '' "$deplib"
+ func_replace_sysroot "$func_stripname_result"
+ func_append newdependency_libs " -L$func_replace_sysroot_result"
+ ;;
+ -R*)
+ func_stripname -R '' "$deplib"
+ func_replace_sysroot "$func_stripname_result"
+ func_append newdependency_libs " -R$func_replace_sysroot_result"
+ ;;
+ *) func_append newdependency_libs " $deplib" ;;
+ esac
+ done
+ dependency_libs="$newdependency_libs"
+ newdlfiles=
+
+ for lib in $dlfiles; do
+ case $lib in
+ *.la)
+ func_basename "$lib"
+ name="$func_basename_result"
+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+ test -z "$libdir" && \
+ func_fatal_error "\`$lib' is not a valid libtool archive"
+ func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name"
+ ;;
+ *) func_append newdlfiles " $lib" ;;
+ esac
+ done
+ dlfiles="$newdlfiles"
+ newdlprefiles=
+ for lib in $dlprefiles; do
+ case $lib in
+ *.la)
+ # Only pass preopened files to the pseudo-archive (for
+ # eventual linking with the app. that links it) if we
+ # didn't already link the preopened objects directly into
+ # the library:
+ func_basename "$lib"
+ name="$func_basename_result"
+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+ test -z "$libdir" && \
+ func_fatal_error "\`$lib' is not a valid libtool archive"
+ func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name"
+ ;;
+ esac
+ done
+ dlprefiles="$newdlprefiles"
+ else
+ newdlfiles=
+ for lib in $dlfiles; do
+ case $lib in
+ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+ *) abs=`pwd`"/$lib" ;;
+ esac
+ func_append newdlfiles " $abs"
+ done
+ dlfiles="$newdlfiles"
+ newdlprefiles=
+ for lib in $dlprefiles; do
+ case $lib in
+ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+ *) abs=`pwd`"/$lib" ;;
+ esac
+ func_append newdlprefiles " $abs"
+ done
+ dlprefiles="$newdlprefiles"
+ fi
+ $RM $output
+ # place dlname in correct position for cygwin
+ # In fact, it would be nice if we could use this code for all target
+ # systems that can't hard-code library paths into their executables
+ # and that have no shared library path variable independent of PATH,
+ # but it turns out we can't easily determine that from inspecting
+ # libtool variables, so we have to hard-code the OSs to which it
+ # applies here; at the moment, that means platforms that use the PE
+ # object format with DLL files. See the long comment at the top of
+ # tests/bindir.at for full details.
+ tdlname=$dlname
+ case $host,$output,$installed,$module,$dlname in
+ *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
+ # If a -bindir argument was supplied, place the dll there.
+ if test "x$bindir" != x ;
+ then
+ func_relative_path "$install_libdir" "$bindir"
+ tdlname=$func_relative_path_result$dlname
+ else
+ # Otherwise fall back on heuristic.
+ tdlname=../bin/$dlname
+ fi
+ ;;
+ esac
+ $ECHO > $output "\
+# $outputname - a libtool library file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='$tdlname'
+
+# Names of this library.
+library_names='$library_names'
+
+# The name of the static archive.
+old_library='$old_library'
+
+# Linker flags that can not go in dependency_libs.
+inherited_linker_flags='$new_inherited_linker_flags'
+
+# Libraries that this one depends upon.
+dependency_libs='$dependency_libs'
+
+# Names of additional weak libraries provided by this library
+weak_library_names='$weak_libs'
+
+# Version information for $libname.
+current=$current
+age=$age
+revision=$revision
+
+# Is this an already installed library?
+installed=$installed
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=$module
+
+# Files to dlopen/dlpreopen
+dlopen='$dlfiles'
+dlpreopen='$dlprefiles'
+
+# Directory that this library needs to be installed in:
+libdir='$install_libdir'"
+ if test "$installed" = no && test "$need_relink" = yes; then
+ $ECHO >> $output "\
+relink_command=\"$relink_command\""
+ fi
+ done
+ }
+
+ # Do a symbolic link so that the libtool archive can be found in
+ # LD_LIBRARY_PATH before the program is installed.
+ func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
+ ;;
+ esac
+ exit $EXIT_SUCCESS
+}
+
+{ test "$opt_mode" = link || test "$opt_mode" = relink; } &&
+ func_mode_link ${1+"$@"}
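+
+# Illustrative usage (a sketch, not part of the original script; compiler,
+# object and library names are placeholders): the link-mode code above is
+# normally driven by invocations such as
+#   libtool --mode=link cc -o libfoo.la foo.lo bar.lo -rpath /usr/local/lib
+# for the lib) branch, or
+#   libtool --mode=link cc -o myprog main.o libfoo.la
+# for the prog) branch handled above.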
+
+
+# func_mode_uninstall arg...
+func_mode_uninstall ()
+{
+ $opt_debug
+ RM="$nonopt"
+ files=
+ rmforce=
+ exit_status=0
+
+ # This variable tells wrapper scripts just to set variables rather
+ # than running their programs.
+ libtool_install_magic="$magic"
+
+ for arg
+ do
+ case $arg in
+ -f) func_append RM " $arg"; rmforce=yes ;;
+ -*) func_append RM " $arg" ;;
+ *) func_append files " $arg" ;;
+ esac
+ done
+
+ test -z "$RM" && \
+ func_fatal_help "you must specify an RM program"
+
+ rmdirs=
+
+ for file in $files; do
+ func_dirname "$file" "" "."
+ dir="$func_dirname_result"
+ if test "X$dir" = X.; then
+ odir="$objdir"
+ else
+ odir="$dir/$objdir"
+ fi
+ func_basename "$file"
+ name="$func_basename_result"
+ test "$opt_mode" = uninstall && odir="$dir"
+
+ # Remember odir for removal later, being careful to avoid duplicates
+ if test "$opt_mode" = clean; then
+ case " $rmdirs " in
+ *" $odir "*) ;;
+ *) func_append rmdirs " $odir" ;;
+ esac
+ fi
+
+ # Don't error if the file doesn't exist and rm -f was used.
+ if { test -L "$file"; } >/dev/null 2>&1 ||
+ { test -h "$file"; } >/dev/null 2>&1 ||
+ test -f "$file"; then
+ :
+ elif test -d "$file"; then
+ exit_status=1
+ continue
+ elif test "$rmforce" = yes; then
+ continue
+ fi
+
+ rmfiles="$file"
+
+ case $name in
+ *.la)
+ # Possibly a libtool archive, so verify it.
+ if func_lalib_p "$file"; then
+ func_source $dir/$name
+
+ # Delete the libtool libraries and symlinks.
+ for n in $library_names; do
+ func_append rmfiles " $odir/$n"
+ done
+ test -n "$old_library" && func_append rmfiles " $odir/$old_library"
+
+ case "$opt_mode" in
+ clean)
+ case " $library_names " in
+ *" $dlname "*) ;;
+ *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;;
+ esac
+ test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i"
+ ;;
+ uninstall)
+ if test -n "$library_names"; then
+ # Do each command in the postuninstall commands.
+ func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+ fi
+
+ if test -n "$old_library"; then
+ # Do each command in the old_postuninstall commands.
+ func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+ fi
+ # FIXME: should reinstall the best remaining shared library.
+ ;;
+ esac
+ fi
+ ;;
+
+ *.lo)
+ # Possibly a libtool object, so verify it.
+ if func_lalib_p "$file"; then
+
+ # Read the .lo file
+ func_source $dir/$name
+
+ # Add PIC object to the list of files to remove.
+ if test -n "$pic_object" &&
+ test "$pic_object" != none; then
+ func_append rmfiles " $dir/$pic_object"
+ fi
+
+ # Add non-PIC object to the list of files to remove.
+ if test -n "$non_pic_object" &&
+ test "$non_pic_object" != none; then
+ func_append rmfiles " $dir/$non_pic_object"
+ fi
+ fi
+ ;;
+
+ *)
+ if test "$opt_mode" = clean ; then
+ noexename=$name
+ case $file in
+ *.exe)
+ func_stripname '' '.exe' "$file"
+ file=$func_stripname_result
+ func_stripname '' '.exe' "$name"
+ noexename=$func_stripname_result
+ # $file with .exe has already been added to rmfiles,
+ # add $file without .exe
+ func_append rmfiles " $file"
+ ;;
+ esac
+ # Do a test to see if this is a libtool program.
+ if func_ltwrapper_p "$file"; then
+ if func_ltwrapper_executable_p "$file"; then
+ func_ltwrapper_scriptname "$file"
+ relink_command=
+ func_source $func_ltwrapper_scriptname_result
+ func_append rmfiles " $func_ltwrapper_scriptname_result"
+ else
+ relink_command=
+ func_source $dir/$noexename
+ fi
+
+ # note $name still contains .exe if it was in $file originally
+ # as does the version of $file that was added into $rmfiles
+ func_append rmfiles " $odir/$name $odir/${name}S.${objext}"
+ if test "$fast_install" = yes && test -n "$relink_command"; then
+ func_append rmfiles " $odir/lt-$name"
+ fi
+ if test "X$noexename" != "X$name" ; then
+ func_append rmfiles " $odir/lt-${noexename}.c"
+ fi
+ fi
+ fi
+ ;;
+ esac
+ func_show_eval "$RM $rmfiles" 'exit_status=1'
+ done
+
+ # Try to remove the ${objdir}s in the directories where we deleted files
+ for dir in $rmdirs; do
+ if test -d "$dir"; then
+ func_show_eval "rmdir $dir >/dev/null 2>&1"
+ fi
+ done
+
+ exit $exit_status
+}
+
+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } &&
+ func_mode_uninstall ${1+"$@"}
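+
+# Illustrative usage (a sketch; paths and names are placeholders): the clean
+# and uninstall modes both go through func_mode_uninstall above, e.g.
+#   libtool --mode=clean rm -f libfoo.la foo.lo
+#   libtool --mode=uninstall rm -f /usr/local/lib/libfoo.la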
+
+test -z "$opt_mode" && {
+ help="$generic_help"
+ func_fatal_help "you must specify a MODE"
+}
+
+test -z "$exec_cmd" && \
+ func_fatal_help "invalid operation mode \`$opt_mode'"
+
+if test -n "$exec_cmd"; then
+ eval exec "$exec_cmd"
+ exit $EXIT_FAILURE
+fi
+
+exit $exit_status
+
+
+# The TAGs below are defined such that we never get into a situation
+# in which we disable both kinds of libraries. Given conflicting
+# choices, we go for a static library, that is the most portable,
+# since we can't tell whether shared libraries were disabled because
+# the user asked for that or because the platform doesn't support
+# them. This is particularly important on AIX, because we don't
+# support having both static and shared libraries enabled at the same
+# time on that platform, so we default to a shared-only configuration.
+# If a disable-shared tag is given, we'll fallback to a static-only
+# configuration. But we'll never go from static-only to shared-only.
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
+build_libtool_libs=no
+build_old_libs=yes
+# ### END LIBTOOL TAG CONFIG: disable-shared
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-static
+build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
+# ### END LIBTOOL TAG CONFIG: disable-static
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+# vi:sw=2
+
diff --git a/build/ltoptions.m4 b/build/ltoptions.m4
new file mode 100644
index 0000000..17cfd51
--- /dev/null
+++ b/build/ltoptions.m4
@@ -0,0 +1,369 @@
+# Helper functions for option handling. -*- Autoconf -*-
+#
+# Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation,
+# Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 7 ltoptions.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
+
+
+# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
+# ------------------------------------------
+m4_define([_LT_MANGLE_OPTION],
+[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
+
+
+# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
+# ---------------------------------------
+# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
+# matching handler defined, dispatch to it. Other OPTION-NAMEs are
+# saved as a flag.
+m4_define([_LT_SET_OPTION],
+[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
+m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
+ _LT_MANGLE_DEFUN([$1], [$2]),
+ [m4_warning([Unknown $1 option `$2'])])[]dnl
+])
+
+
+# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
+# ------------------------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+m4_define([_LT_IF_OPTION],
+[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
+
+
+# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
+# -------------------------------------------------------
+# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
+# are set.
+m4_define([_LT_UNLESS_OPTIONS],
+[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+ [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
+ [m4_define([$0_found])])])[]dnl
+m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
+])[]dnl
+])
+
+
+# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
+# ----------------------------------------
+# OPTION-LIST is a space-separated list of Libtool options associated
+# with MACRO-NAME. If any OPTION has a matching handler declared with
+# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
+# the unknown option and exit.
+m4_defun([_LT_SET_OPTIONS],
+[# Set options
+m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+ [_LT_SET_OPTION([$1], _LT_Option)])
+
+m4_if([$1],[LT_INIT],[
+ dnl
+ dnl Simply set some default values (i.e off) if boolean options were not
+ dnl specified:
+ _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
+ ])
+ _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
+ ])
+ dnl
+ dnl If no reference was made to various pairs of opposing options, then
+ dnl we run the default mode handler for the pair. For example, if neither
+ dnl `shared' nor `disable-shared' was passed, we enable building of shared
+ dnl archives by default:
+ _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
+ _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
+ _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
+ _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
+ [_LT_ENABLE_FAST_INSTALL])
+ ])
+])# _LT_SET_OPTIONS
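+
+# Illustrative example (not part of the macro; the option list is arbitrary):
+# a configure.ac line such as
+#   LT_INIT([dlopen win32-dll])
+# makes _LT_SET_OPTIONS dispatch to the `dlopen' and `win32-dll' handlers
+# defined below, while the shared/static/pic/fast-install pairs that were
+# not mentioned fall back to their default handlers.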
+
+
+## --------------------------------- ##
+## Macros to handle LT_INIT options. ##
+## --------------------------------- ##
+
+# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
+# -----------------------------------------
+m4_define([_LT_MANGLE_DEFUN],
+[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
+
+
+# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
+# -----------------------------------------------
+m4_define([LT_OPTION_DEFINE],
+[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
+])# LT_OPTION_DEFINE
+
+
+# dlopen
+# ------
+LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
+])
+
+AU_DEFUN([AC_LIBTOOL_DLOPEN],
+[_LT_SET_OPTION([LT_INIT], [dlopen])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `dlopen' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
+
+
+# win32-dll
+# ---------
+# Declare package support for building win32 dll's.
+LT_OPTION_DEFINE([LT_INIT], [win32-dll],
+[enable_win32_dll=yes
+
+case $host in
+*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
+ AC_CHECK_TOOL(AS, as, false)
+ AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+ AC_CHECK_TOOL(OBJDUMP, objdump, false)
+ ;;
+esac
+
+test -z "$AS" && AS=as
+_LT_DECL([], [AS], [1], [Assembler program])dnl
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
+])# win32-dll
+
+AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+_LT_SET_OPTION([LT_INIT], [win32-dll])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `win32-dll' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
+
+
+# _LT_ENABLE_SHARED([DEFAULT])
+# ----------------------------
+# implement the --enable-shared flag, and support the `shared' and
+# `disable-shared' LT_INIT options.
+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_SHARED],
+[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([shared],
+ [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
+ [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
+ [p=${PACKAGE-default}
+ case $enableval in
+ yes) enable_shared=yes ;;
+ no) enable_shared=no ;;
+ *)
+ enable_shared=no
+ # Look at the argument we got. We use all the common list separators.
+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+ for pkg in $enableval; do
+ IFS="$lt_save_ifs"
+ if test "X$pkg" = "X$p"; then
+ enable_shared=yes
+ fi
+ done
+ IFS="$lt_save_ifs"
+ ;;
+ esac],
+ [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
+
+ _LT_DECL([build_libtool_libs], [enable_shared], [0],
+ [Whether or not to build shared libraries])
+])# _LT_ENABLE_SHARED
+
+LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
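+
+# Illustrative example (package names are placeholders): with the macro above,
+#   ./configure --enable-shared=foo,bar
+# builds shared libraries only when $PACKAGE is `foo' or `bar', while a plain
+# --disable-shared turns them off unconditionally.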
+
+# Old names:
+AC_DEFUN([AC_ENABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
+])
+
+AC_DEFUN([AC_DISABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], [disable-shared])
+])
+
+AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
+AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_SHARED], [])
+dnl AC_DEFUN([AM_DISABLE_SHARED], [])
+
+
+
+# _LT_ENABLE_STATIC([DEFAULT])
+# ----------------------------
+# implement the --enable-static flag, and support the `static' and
+# `disable-static' LT_INIT options.
+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_STATIC],
+[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([static],
+ [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
+ [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
+ [p=${PACKAGE-default}
+ case $enableval in
+ yes) enable_static=yes ;;
+ no) enable_static=no ;;
+ *)
+ enable_static=no
+ # Look at the argument we got. We use all the common list separators.
+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+ for pkg in $enableval; do
+ IFS="$lt_save_ifs"
+ if test "X$pkg" = "X$p"; then
+ enable_static=yes
+ fi
+ done
+ IFS="$lt_save_ifs"
+ ;;
+ esac],
+ [enable_static=]_LT_ENABLE_STATIC_DEFAULT)
+
+ _LT_DECL([build_old_libs], [enable_static], [0],
+ [Whether or not to build static libraries])
+])# _LT_ENABLE_STATIC
+
+LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
+])
+
+AC_DEFUN([AC_DISABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], [disable-static])
+])
+
+AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
+AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_STATIC], [])
+dnl AC_DEFUN([AM_DISABLE_STATIC], [])
+
+
+
+# _LT_ENABLE_FAST_INSTALL([DEFAULT])
+# ----------------------------------
+# implement the --enable-fast-install flag, and support the `fast-install'
+# and `disable-fast-install' LT_INIT options.
+# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_FAST_INSTALL],
+[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([fast-install],
+ [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
+ [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
+ [p=${PACKAGE-default}
+ case $enableval in
+ yes) enable_fast_install=yes ;;
+ no) enable_fast_install=no ;;
+ *)
+ enable_fast_install=no
+ # Look at the argument we got. We use all the common list separators.
+ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+ for pkg in $enableval; do
+ IFS="$lt_save_ifs"
+ if test "X$pkg" = "X$p"; then
+ enable_fast_install=yes
+ fi
+ done
+ IFS="$lt_save_ifs"
+ ;;
+ esac],
+ [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
+
+_LT_DECL([fast_install], [enable_fast_install], [0],
+ [Whether or not to optimize for fast installation])dnl
+])# _LT_ENABLE_FAST_INSTALL
+
+LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
+
+# Old names:
+AU_DEFUN([AC_ENABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `fast-install' option into LT_INIT's first parameter.])
+])
+
+AU_DEFUN([AC_DISABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `disable-fast-install' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
+dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
+
+
+# _LT_WITH_PIC([MODE])
+# --------------------
+# implement the --with-pic flag, and support the `pic-only' and `no-pic'
+# LT_INIT options.
+# MODE is either `yes' or `no'. If omitted, it defaults to `both'.
+m4_define([_LT_WITH_PIC],
+[AC_ARG_WITH([pic],
+ [AS_HELP_STRING([--with-pic],
+ [try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
+ [pic_mode="$withval"],
+ [pic_mode=default])
+
+test -z "$pic_mode" && pic_mode=m4_default([$1], [default])
+
+_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
+])# _LT_WITH_PIC
+
+LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
+
+# Old name:
+AU_DEFUN([AC_LIBTOOL_PICMODE],
+[_LT_SET_OPTION([LT_INIT], [pic-only])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `pic-only' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
+
+## ----------------- ##
+## LTDL_INIT Options ##
+## ----------------- ##
+
+m4_define([_LTDL_MODE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
+ [m4_define([_LTDL_MODE], [nonrecursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [recursive],
+ [m4_define([_LTDL_MODE], [recursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [subproject],
+ [m4_define([_LTDL_MODE], [subproject])])
+
+m4_define([_LTDL_TYPE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [installable],
+ [m4_define([_LTDL_TYPE], [installable])])
+LT_OPTION_DEFINE([LTDL_INIT], [convenience],
+ [m4_define([_LTDL_TYPE], [convenience])])
diff --git a/build/ltsugar.m4 b/build/ltsugar.m4
new file mode 100644
index 0000000..9000a05
--- /dev/null
+++ b/build/ltsugar.m4
@@ -0,0 +1,123 @@
+# ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*-
+#
+# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 6 ltsugar.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
+
+
+# lt_join(SEP, ARG1, [ARG2...])
+# -----------------------------
+# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
+# associated separator.
+# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
+# versions in m4sugar had bugs.
+m4_define([lt_join],
+[m4_if([$#], [1], [],
+ [$#], [2], [[$2]],
+ [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
+m4_define([_lt_join],
+[m4_if([$#$2], [2], [],
+ [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
+
+
+# lt_car(LIST)
+# lt_cdr(LIST)
+# ------------
+# Manipulate m4 lists.
+# These macros are necessary as long as we still need to support
+# Autoconf 2.59, which quotes differently.
+m4_define([lt_car], [[$1]])
+m4_define([lt_cdr],
+[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
+ [$#], 1, [],
+ [m4_dquote(m4_shift($@))])])
+m4_define([lt_unquote], $1)
+
+
+# lt_append(MACRO-NAME, STRING, [SEPARATOR])
+# ------------------------------------------
+# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'.
+# Note that neither SEPARATOR nor STRING are expanded; they are appended
+# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
+# No SEPARATOR is output if MACRO-NAME was previously undefined (as opposed
+# to defined but empty).
+#
+# This macro is needed until we can rely on Autoconf 2.62, since earlier
+# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
+m4_define([lt_append],
+[m4_define([$1],
+ m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
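+
+# Illustrative example (the macro name is arbitrary):
+#   lt_append([my_flags], [-O2], [ ])
+#   lt_append([my_flags], [-g], [ ])
+# leaves `my_flags' expanding to `-O2 -g'; the first call emits no separator
+# because `my_flags' was previously undefined.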
+
+
+
+# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
+# ----------------------------------------------------------
+# Produce a SEP delimited list of all paired combinations of elements of
+# PREFIX-LIST with SUFFIX1 through SUFFIXn. Each element of the list
+# has the form PREFIXmINFIXSUFFIXn.
+# Needed until we can rely on m4_combine added in Autoconf 2.62.
+m4_define([lt_combine],
+[m4_if(m4_eval([$# > 3]), [1],
+ [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
+[[m4_foreach([_Lt_prefix], [$2],
+ [m4_foreach([_Lt_suffix],
+ ]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
+ [_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
+
+
+# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
+# -----------------------------------------------------------------------
+# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
+# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
+m4_define([lt_if_append_uniq],
+[m4_ifdef([$1],
+ [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
+ [lt_append([$1], [$2], [$3])$4],
+ [$5])],
+ [lt_append([$1], [$2], [$3])$4])])
+
+
+# lt_dict_add(DICT, KEY, VALUE)
+# -----------------------------
+m4_define([lt_dict_add],
+[m4_define([$1($2)], [$3])])
+
+
+# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
+# --------------------------------------------
+m4_define([lt_dict_add_subkey],
+[m4_define([$1($2:$3)], [$4])])
+
+
+# lt_dict_fetch(DICT, KEY, [SUBKEY])
+# ----------------------------------
+m4_define([lt_dict_fetch],
+[m4_ifval([$3],
+ m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
+ m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
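+
+# Illustrative example (dictionary and key names are arbitrary):
+#   lt_dict_add_subkey([my_dict], [CC], [value], [gcc])
+#   lt_dict_fetch([my_dict], [CC], [value])   => gcc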
+
+
+# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
+# -----------------------------------------------------------------
+m4_define([lt_if_dict_fetch],
+[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
+ [$5],
+ [$6])])
+
+
+# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
+# --------------------------------------------------------------
+m4_define([lt_dict_filter],
+[m4_if([$5], [], [],
+ [lt_join(m4_quote(m4_default([$4], [[, ]])),
+ lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
+ [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
+])
diff --git a/build/ltversion.m4 b/build/ltversion.m4
new file mode 100644
index 0000000..9c7b5d4
--- /dev/null
+++ b/build/ltversion.m4
@@ -0,0 +1,23 @@
+# ltversion.m4 -- version numbers -*- Autoconf -*-
+#
+# Copyright (C) 2004 Free Software Foundation, Inc.
+# Written by Scott James Remnant, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# @configure_input@
+
+# serial 3293 ltversion.m4
+# This file is part of GNU Libtool
+
+m4_define([LT_PACKAGE_VERSION], [2.4])
+m4_define([LT_PACKAGE_REVISION], [1.3293])
+
+AC_DEFUN([LTVERSION_VERSION],
+[macro_version='2.4'
+macro_revision='1.3293'
+_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
+_LT_DECL(, macro_revision, 0)
+])
diff --git a/build/lt~obsolete.m4 b/build/lt~obsolete.m4
new file mode 100644
index 0000000..c573da9
--- /dev/null
+++ b/build/lt~obsolete.m4
@@ -0,0 +1,98 @@
+# lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*-
+#
+# Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
+# Written by Scott James Remnant, 2004.
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 5 lt~obsolete.m4
+
+# These exist entirely to fool aclocal when bootstrapping libtool.
+#
+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN)
+# which have later been changed to m4_define as they aren't part of the
+# exported API, or moved to Autoconf or Automake where they belong.
+#
+# The trouble is, aclocal is a bit thick. It'll see the old AC_DEFUN
+# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
+# using a macro with the same name in our local m4/libtool.m4 it'll
+# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
+# and doesn't know about Autoconf macros at all.)
+#
+# So we provide this file, which has a silly filename so it's always
+# included after everything else. This provides aclocal with the
+# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
+# because those macros already exist, or will be overwritten later.
+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6.
+#
+# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
+# Yes, that means every name once taken will need to remain here until
+# we give up compatibility with versions before 1.7, at which point
+# we need to keep only those names which we still refer to.
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
+
+m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
+m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])])
+m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])])
+m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
+m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])])
+m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])])
+m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
+m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])])
+m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])])
+m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])])
+m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
+m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
+m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
+m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
+m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])])
+m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
+m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
+m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])])
+m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])])
+m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
+m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
+m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
+m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
+m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])])
+m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])])
+m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])])
+m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
+m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])])
+m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])])
+m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])])
+m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])])
+m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
+m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])])
+m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
+m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])])
+m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])])
+m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])])
+m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
+m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
+m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
+m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
+m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
+m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
+m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])])
+m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
+m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS], [AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
+m4_ifndef([_LT_AC_PROG_CXXCPP], [AC_DEFUN([_LT_AC_PROG_CXXCPP])])
+m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS], [AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
+m4_ifndef([_LT_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_PROG_F77], [AC_DEFUN([_LT_PROG_F77])])
+m4_ifndef([_LT_PROG_FC], [AC_DEFUN([_LT_PROG_FC])])
+m4_ifndef([_LT_PROG_CXX], [AC_DEFUN([_LT_PROG_CXX])])
diff --git a/build/run_ctypesgen.sh b/build/run_ctypesgen.sh
new file mode 100755
index 0000000..a7cab80
--- /dev/null
+++ b/build/run_ctypesgen.sh
@@ -0,0 +1,89 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Helper script to generate the ctypesgen wrappers
+#
+
+LT_EXECUTE="$1"
+
+CPPFLAGS="$2"
+EXTRA_CTYPES_LDFLAGS="$3"
+PYTHON="$4"
+CTYPESGEN="$5"
+
+abs_srcdir="$6"
+abs_builddir="$7"
+
+svn_libdir="$8"
+apr_config="$9"
+apu_config="${10}"
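+
+# Illustrative invocation (a sketch only; every value below is a placeholder,
+# the real arguments are supplied by the build system):
+#   build/run_ctypesgen.sh "libtool --mode=execute" "$CPPFLAGS" \
+#       "$EXTRA_CTYPES_LDFLAGS" python ctypesgen.py \
+#       /path/to/srcdir /path/to/builddir /path/to/builddir/lib \
+#       apr-1-config apu-1-config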
+
+cp_relpath="subversion/bindings/ctypes-python"
+output="$cp_relpath/svn_all.py"
+
+# Avoid build path in csvn/core/functions.py
+if test "$abs_builddir" = "$abs_srcdir"; then
+ svn_includes="subversion/include"
+else
+ mkdir -p "$cp_relpath/csvn/core"
+ svn_includes="$abs_srcdir/subversion/include"
+fi
+
+### most of this should be done at configure time and passed in
+apr_cppflags="`$apr_config --includes --cppflags`"
+apr_include_dir="`$apr_config --includedir`"
+apr_ldflags="`$apr_config --ldflags --link-ld`"
+
+apu_cppflags="`$apu_config --includes`" # no --cppflags
+apu_include_dir="`$apu_config --includedir`"
+apu_ldflags="`$apu_config --ldflags --link-ld`"
+
+cpp="`$apr_config --cpp`"
+### end
+
+cppflags="$apr_cppflags $apu_cppflags -I$svn_includes"
+ldflags="-L$svn_libdir $apr_ldflags $apu_ldflags $EXTRA_CTYPES_LDFLAGS"
+
+
+# This order is important. The resulting stubs will load libraries in
+# this particular order.
+### maybe have gen-make do this for us
+for lib in subr diff delta fs repos wc ra client ; do
+ ldflags="$ldflags -lsvn_$lib-1"
+done
+
+includes="$svn_includes/svn_*.h $apr_include_dir/ap[ru]_*.h"
+if test "$apr_include_dir" != "$apu_include_dir" ; then
+ includes="$includes $apu_include_dir/ap[ru]_*.h"
+fi
+
+# Remove some whitespace in csvn/core/functions.py
+CPPFLAGS="`echo $CPPFLAGS`"
+cppflags="`echo $cppflags`"
+
+echo $LT_EXECUTE $PYTHON $CTYPESGEN --cpp "$cpp $CPPFLAGS $cppflags" $ldflags $includes -o $output --no-macro-warnings --strip-build-path=$abs_srcdir
+$LT_EXECUTE $PYTHON $CTYPESGEN --cpp "$cpp $CPPFLAGS $cppflags" $ldflags $includes -o $output --no-macro-warnings --strip-build-path=$abs_srcdir
+
+(cat $abs_srcdir/$cp_relpath/csvn/core/functions.py.in; \
+ sed -e '/^FILE =/d' $output | \
+ perl -pe 's{(\s+\w+)\.restype = POINTER\(svn_error_t\)}{\1.restype = POINTER(svn_error_t)\n\1.errcheck = _svn_errcheck}' \
+ ) > $abs_srcdir/$cp_relpath/csvn/core/functions.py
diff --git a/build/run_tests.py b/build/run_tests.py
new file mode 100755
index 0000000..c94a666
--- /dev/null
+++ b/build/run_tests.py
@@ -0,0 +1,626 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# run_tests.py - run the tests in the regression test suite.
+#
+
+'''usage: python run_tests.py
+ [--verbose] [--log-to-stdout] [--cleanup] [--parallel]
+ [--url=<base-url>] [--http-library=<http-library>] [--enable-sasl]
+ [--fs-type=<fs-type>] [--fsfs-packing] [--fsfs-sharding=<n>]
+ [--list] [--milestone-filter=<regex>] [--mode-filter=<type>]
+ [--server-minor-version=<version>]
+ [--config-file=<file>]
+ <abs_srcdir> <abs_builddir>
+ <prog ...>
+
+The optional flags and the first two parameters are passed unchanged
+to the TestHarness constructor. All other parameters are names of
+test programs.
+
+Each <prog> should be the full path (absolute or from the current directory)
+and filename of a test program, optionally followed by '#' and a comma-
+separated list of test numbers; the default is to run all the tests in it.
+'''
+
+# Combined width of the test-info text and its progress dots on each
+# "Running tests in ..." status line
+LINE_LENGTH = 45
+
+import os, re, subprocess, sys, imp
+from datetime import datetime
+
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+
+# Ensure the compiled C tests use a known locale (Python tests set the locale
+# explicitly).
+os.environ['LC_ALL'] = 'C'
+
+class TextColors:
+ '''Some ANSI terminal constants for output color'''
+ ENDC = '\033[0;m'
+ FAILURE = '\033[1;31m'
+ SUCCESS = '\033[1;32m'
+
+ @classmethod
+ def disable(cls):
+ cls.ENDC = ''
+ cls.FAILURE = ''
+ cls.SUCCESS = ''
+
+
+class TestHarness:
+ '''Test harness for Subversion tests.
+ '''
+
+ def __init__(self, abs_srcdir, abs_builddir, logfile, faillogfile,
+ base_url=None, fs_type=None, http_library=None,
+ server_minor_version=None, verbose=None,
+ cleanup=None, enable_sasl=None, parallel=None, config_file=None,
+ fsfs_sharding=None, fsfs_packing=None,
+ list_tests=None, svn_bin=None, mode_filter=None,
+ milestone_filter=None):
+ '''Construct a TestHarness instance.
+
+ ABS_SRCDIR and ABS_BUILDDIR are the source and build directories.
+ LOGFILE is the name of the log file. If LOGFILE is None, let tests
+ print their output to stdout and stderr, and don't print a summary
+ at the end (since there's no log file to analyze).
+ BASE_URL is the base url for DAV tests.
+ FS_TYPE is the FS type for repository creation.
+ HTTP_LIBRARY is the HTTP library for DAV-based communications.
+ SERVER_MINOR_VERSION is the minor version of the server being tested.
+ SVN_BIN is the path where the svn binaries are installed.
+    MODE_FILTER restricts the TestHarness to tests with the given expected
+    mode: XFail, Skip, Pass, or All (the default).  MILESTONE_FILTER is a
+ string representation of a valid regular expression pattern; when used
+ in conjunction with LIST_TESTS, the only tests that are listed are
+ those with an associated issue in the tracker which has a target
+ milestone that matches the regex.
+ '''
+ self.srcdir = abs_srcdir
+ self.builddir = abs_builddir
+ self.logfile = logfile
+ self.faillogfile = faillogfile
+ self.base_url = base_url
+ self.fs_type = fs_type
+ self.http_library = http_library
+ self.server_minor_version = server_minor_version
+    # If you change the condition below, change it in
+    # ../subversion/tests/cmdline/svntest/main.py too.
+ if server_minor_version is not None:
+ if int(server_minor_version) < 3 or int(server_minor_version) > 7:
+ sys.stderr.write("Test harness only supports server minor versions 3-7\n")
+ sys.exit(1)
+ self.verbose = verbose
+ self.cleanup = cleanup
+ self.enable_sasl = enable_sasl
+ self.parallel = parallel
+ self.fsfs_sharding = fsfs_sharding
+ self.fsfs_packing = fsfs_packing
+ if fsfs_packing is not None and fsfs_sharding is None:
+ raise Exception('--fsfs-packing requires --fsfs-sharding')
+ self.config_file = None
+ if config_file is not None:
+ self.config_file = os.path.abspath(config_file)
+ self.list_tests = list_tests
+ self.milestone_filter = milestone_filter
+ self.svn_bin = svn_bin
+ self.mode_filter = mode_filter
+ self.log = None
+ if not sys.stdout.isatty() or sys.platform == 'win32':
+ TextColors.disable()
+
+ def run(self, list):
+ '''Run all test programs given in LIST. Print a summary of results, if
+ there is a log file. Return zero iff all test programs passed.'''
+ self._open_log('w')
+ failed = 0
+ for cnt, prog in enumerate(list):
+ failed = self._run_test(prog, cnt, len(list)) or failed
+
+ if self.log is None:
+ return failed
+
+    # Open the log in binary mode because it can contain binary data
+    # from diff_tests.py's testing of svnpatch.  In text mode that data
+    # could keep readlines() from reading the whole log, because a stray
+    # byte can be mistaken for the EOF marker.
+ self._open_log('rb')
+ log_lines = self.log.readlines()
+
+ # Remove \r characters introduced by opening the log as binary
+ if sys.platform == 'win32':
+ log_lines = [x.replace('\r', '') for x in log_lines]
+
+ # Print the results, from least interesting to most interesting.
+
+ # Helper for Work-In-Progress indications for XFAIL tests.
+ wimptag = ' [[WIMP: '
+ def printxfail(x):
+ wip = x.find(wimptag)
+ if 0 > wip:
+ sys.stdout.write(x)
+ else:
+ sys.stdout.write('%s\n [[%s'
+ % (x[:wip], x[wip + len(wimptag):]))
+
+ if self.list_tests:
+ passed = [x for x in log_lines if x[8:13] == ' ']
+ else:
+ passed = [x for x in log_lines if x[:6] == 'PASS: ']
+
+ if self.list_tests:
+ skipped = [x for x in log_lines if x[8:12] == 'SKIP']
+ else:
+ skipped = [x for x in log_lines if x[:6] == 'SKIP: ']
+
+ if skipped and not self.list_tests:
+ print('At least one test was SKIPPED, checking ' + self.logfile)
+ for x in skipped:
+ sys.stdout.write(x)
+
+ if self.list_tests:
+ xfailed = [x for x in log_lines if x[8:13] == 'XFAIL']
+ else:
+ xfailed = [x for x in log_lines if x[:6] == 'XFAIL:']
+ if xfailed and not self.list_tests:
+ print('At least one test XFAILED, checking ' + self.logfile)
+ for x in xfailed:
+ printxfail(x)
+
+ xpassed = [x for x in log_lines if x[:6] == 'XPASS:']
+ if xpassed:
+ print('At least one test XPASSED, checking ' + self.logfile)
+ for x in xpassed:
+ printxfail(x)
+
+ failed_list = [x for x in log_lines if x[:6] == 'FAIL: ']
+ if failed_list:
+ print('At least one test FAILED, checking ' + self.logfile)
+ for x in failed_list:
+ sys.stdout.write(x)
+
+ # Print summaries, from least interesting to most interesting.
+ if self.list_tests:
+ print('Summary of test listing:')
+ else:
+ print('Summary of test results:')
+ if passed:
+ if self.list_tests:
+ print(' %d test%s are set to PASS'
+ % (len(passed), 's'*min(len(passed) - 1, 1)))
+ else:
+ print(' %d test%s PASSED'
+ % (len(passed), 's'*min(len(passed) - 1, 1)))
+ if skipped:
+ if self.list_tests:
+ print(' %d test%s are set as SKIP'
+ % (len(skipped), 's'*min(len(skipped) - 1, 1)))
+ else:
+ print(' %d test%s SKIPPED'
+ % (len(skipped), 's'*min(len(skipped) - 1, 1)))
+ if xfailed:
+ passwimp = [x for x in xfailed if 0 <= x.find(wimptag)]
+ if passwimp:
+ if self.list_tests:
+ print(' %d test%s are set to XFAIL (%d WORK-IN-PROGRESS)'
+ % (len(xfailed), 's'*min(len(xfailed) - 1, 1), len(passwimp)))
+ else:
+ print(' %d test%s XFAILED (%d WORK-IN-PROGRESS)'
+ % (len(xfailed), 's'*min(len(xfailed) - 1, 1), len(passwimp)))
+ else:
+ if self.list_tests:
+ print(' %d test%s are set as XFAIL'
+ % (len(xfailed), 's'*min(len(xfailed) - 1, 1)))
+ else:
+ print(' %d test%s XFAILED'
+ % (len(xfailed), 's'*min(len(xfailed) - 1, 1)))
+ if xpassed:
+ failwimp = [x for x in xpassed if 0 <= x.find(wimptag)]
+ if failwimp:
+ print(' %d test%s XPASSED (%d WORK-IN-PROGRESS)'
+ % (len(xpassed), 's'*min(len(xpassed) - 1, 1), len(failwimp)))
+ else:
+ print(' %d test%s XPASSED'
+ % (len(xpassed), 's'*min(len(xpassed) - 1, 1)))
+ if failed_list:
+ print(' %d test%s FAILED'
+ % (len(failed_list), 's'*min(len(failed_list) - 1, 1)))
+
+ # Copy the truly interesting verbose logs to a separate file, for easier
+ # viewing.
+ if xpassed or failed_list:
+ faillog = open(self.faillogfile, 'wb')
+ last_start_lineno = None
+ last_start_re = re.compile('^(FAIL|SKIP|XFAIL|PASS|START|CLEANUP|END):')
+ for lineno, line in enumerate(log_lines):
+ # Iterate the lines. If it ends a test we're interested in, dump that
+ # test to FAILLOG. If it starts a test (at all), remember the line
+ # number (in case we need it later).
+ if line in xpassed or line in failed_list:
+ faillog.write('[[[\n')
+ faillog.writelines(log_lines[last_start_lineno : lineno+1])
+ faillog.write(']]]\n\n')
+ if last_start_re.match(line):
+ last_start_lineno = lineno + 1
+ faillog.close()
+ elif os.path.exists(self.faillogfile):
+ print("WARNING: no failures, but '%s' exists from a previous run."
+ % self.faillogfile)
+
+ self._close_log()
+ return failed
+
+ def _open_log(self, mode):
+ 'Open the log file with the required MODE.'
+ if self.logfile:
+ self._close_log()
+ self.log = open(self.logfile, mode)
+
+ def _close_log(self):
+ 'Close the log file.'
+    if self.log is not None:
+ self.log.close()
+ self.log = None
+
+ def _run_c_test(self, prog, test_nums, dot_count):
+    'Run a C test, escaping parameters as required.'
+ progdir, progbase = os.path.split(prog)
+
+ if self.list_tests and self.milestone_filter:
+      print('WARNING: --milestone-filter option does not currently work with C tests')
+
+ if os.access(progbase, os.X_OK):
+ progname = './' + progbase
+ cmdline = [progname,
+ '--srcdir=' + os.path.join(self.srcdir, progdir)]
+ if self.config_file is not None:
+ cmdline.append('--config-file=' + self.config_file)
+ cmdline.append('--trap-assertion-failures')
+ else:
+ print('Don\'t know what to do about ' + progbase)
+ sys.exit(1)
+
+ if self.verbose is not None:
+ cmdline.append('--verbose')
+ if self.cleanup is not None:
+ cmdline.append('--cleanup')
+ if self.fs_type is not None:
+ cmdline.append('--fs-type=' + self.fs_type)
+ if self.server_minor_version is not None:
+ cmdline.append('--server-minor-version=' + self.server_minor_version)
+ if self.list_tests is not None:
+ cmdline.append('--list')
+ if self.mode_filter is not None:
+ cmdline.append('--mode-filter=' + self.mode_filter)
+
+ if test_nums:
+ test_nums = test_nums.split(',')
+ cmdline.extend(test_nums)
+
+ if test_nums:
+ total = len(test_nums)
+ else:
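+      # Ask the test program itself how many tests it contains; the first
+      # two lines of its --list output are headers rather than test entries.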
+ total_cmdline = [cmdline[0], '--list']
+ prog = subprocess.Popen(total_cmdline, stdout=subprocess.PIPE)
+ lines = prog.stdout.readlines()
+ total = len(lines) - 2
+
+ # This has to be class-scoped for use in the progress_func()
+ self.dots_written = 0
+ def progress_func(completed):
+ dots = (completed * dot_count) / total
+
+ dots_to_write = dots - self.dots_written
+ if self.log:
+ os.write(sys.stdout.fileno(), '.' * dots_to_write)
+
+ self.dots_written = dots
+
+ tests_completed = 0
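+    # Read the test's stdout line by line, copy it to the log, and count
+    # PASS/FAIL/XFAIL/XPASS/SKIP lines to drive the progress dots.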
+ prog = subprocess.Popen(cmdline, stdout=subprocess.PIPE,
+ stderr=self.log)
+ line = prog.stdout.readline()
+ while line:
+      # If using --log-to-stdout, self.log is None.
+ if self.log:
+ self.log.write(line)
+
+ if line.startswith('PASS') or line.startswith('FAIL') \
+ or line.startswith('XFAIL') or line.startswith('XPASS') \
+ or line.startswith('SKIP'):
+ tests_completed += 1
+ progress_func(tests_completed)
+
+ line = prog.stdout.readline()
+
+ prog.wait()
+ return prog.returncode
+
+ def _run_py_test(self, prog, test_nums, dot_count):
+    'Run a Python test, passing parameters as needed.'
+ progdir, progbase = os.path.split(prog)
+
+ old_path = sys.path[:]
+ sys.path = [progdir] + sys.path
+
+ try:
+ prog_mod = imp.load_module(progbase[:-3], open(prog, 'r'), prog,
+ ('.py', 'U', imp.PY_SOURCE))
+ except:
+ print('Don\'t know what to do about ' + progbase)
+ raise
+
+ import svntest.main
+
+ # set up our options
+ svntest.main.create_default_options()
+ if self.base_url is not None:
+ svntest.main.options.test_area_url = self.base_url
+ if self.enable_sasl is not None:
+ svntest.main.options.enable_sasl = True
+ if self.parallel is not None:
+ svntest.main.options.parallel = svntest.main.default_num_threads
+ if self.config_file is not None:
+ svntest.main.options.config_file = self.config_file
+ if self.verbose is not None:
+ svntest.main.options.verbose = True
+ if self.cleanup is not None:
+ svntest.main.options.cleanup = True
+ if self.fs_type is not None:
+ svntest.main.options.fs_type = self.fs_type
+ if self.http_library is not None:
+ svntest.main.options.http_library = self.http_library
+ if self.server_minor_version is not None:
+ svntest.main.options.server_minor_version = int(self.server_minor_version)
+ if self.list_tests is not None:
+ svntest.main.options.list_tests = True
+ if self.milestone_filter is not None:
+ svntest.main.options.milestone_filter = self.milestone_filter
+ if self.svn_bin is not None:
+ svntest.main.options.svn_bin = self.svn_bin
+ if self.fsfs_sharding is not None:
+ svntest.main.options.fsfs_sharding = int(self.fsfs_sharding)
+ if self.fsfs_packing is not None:
+ svntest.main.options.fsfs_packing = self.fsfs_packing
+ if self.mode_filter is not None:
+ svntest.main.options.mode_filter = self.mode_filter
+
+ svntest.main.options.srcdir = self.srcdir
+
+    # Redirect stdout and stderr to the log at the file-descriptor level,
+    # so output written by child processes is captured as well.
+ if self.log:
+ sys.stdout.flush()
+ sys.stderr.flush()
+ self.log.flush()
+ old_stdout = os.dup(1)
+ old_stderr = os.dup(2)
+ os.dup2(self.log.fileno(), 1)
+ os.dup2(self.log.fileno(), 2)
+
+ # This has to be class-scoped for use in the progress_func()
+ self.dots_written = 0
+ def progress_func(completed, total):
+ dots = (completed * dot_count) / total
+
+ dots_to_write = dots - self.dots_written
+ if self.log:
+ os.write(old_stdout, '.' * dots_to_write)
+
+ self.dots_written = dots
+
+ serial_only = hasattr(prog_mod, 'serial_only') and prog_mod.serial_only
+
+ # run the tests
+ svntest.testcase.TextColors.disable()
+
+ if self.list_tests:
+ prog_f = None
+ else:
+ prog_f = progress_func
+
+ if test_nums:
+ test_selection = [test_nums]
+ else:
+ test_selection = []
+
+ failed = svntest.main.execute_tests(prog_mod.test_list,
+ serial_only=serial_only,
+ test_name=progbase,
+ progress_func=prog_f,
+ test_selection=test_selection)
+
+ # restore some values
+ sys.path = old_path
+ if self.log:
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os.dup2(old_stdout, 1)
+ os.dup2(old_stderr, 2)
+ os.close(old_stdout)
+ os.close(old_stderr)
+
+ return failed
+
+ def _run_test(self, prog, test_nr, total_tests):
+ "Run a single test. Return the test's exit code."
+
+ if self.log:
+ log = self.log
+ else:
+ log = sys.stdout
+
+ test_nums = None
+ if '#' in prog:
+ prog, test_nums = prog.split('#')
+
+ progdir, progbase = os.path.split(prog)
+ if self.log:
+ # Using write here because we don't want even a trailing space
+ test_info = '%s [%d/%d]' % (progbase, test_nr + 1, total_tests)
+ if self.list_tests:
+ sys.stdout.write('Listing tests in %s' % (test_info, ))
+ else:
+ sys.stdout.write('Running tests in %s' % (test_info, ))
+ sys.stdout.flush()
+ else:
+ # ### Hack for --log-to-stdout to work (but not print any dots).
+ test_info = ''
+
+ if self.list_tests:
+ log.write('LISTING: %s\n' % progbase)
+ else:
+ log.write('START: %s\n' % progbase)
+
+ log.flush()
+
+ start_time = datetime.now()
+
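+    # Tests expect to run from the directory containing the test program,
+    # so switch there for the duration of the run and restore afterwards.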
+ progabs = os.path.abspath(os.path.join(self.srcdir, prog))
+ old_cwd = os.getcwd()
+ try:
+ os.chdir(progdir)
+ if progbase[-3:] == '.py':
+ failed = self._run_py_test(progabs, test_nums,
+ (LINE_LENGTH - len(test_info)))
+ else:
+ failed = self._run_c_test(prog, test_nums,
+ (LINE_LENGTH - len(test_info)))
+ except:
+ os.chdir(old_cwd)
+ raise
+ else:
+ os.chdir(old_cwd)
+
+    # Failed tests return 1.  Any other non-zero code probably means the
+    # test didn't run at all and therefore produced no failure info of its
+    # own; in that case, log a generic failure message.
+    # ### Even if failed == 1 it could be that the test didn't run at all.
+ if failed and failed != 1:
+ if self.log:
+ log.write('FAIL: %s: Unknown test failure; see tests.log.\n' % progbase)
+ log.flush()
+ else:
+ log.write('FAIL: %s: Unknown test failure.\n' % progbase)
+
+ if not self.list_tests:
+ # Log the elapsed time.
+ elapsed_time = str(datetime.now() - start_time)
+ log.write('END: %s\n' % progbase)
+ log.write('ELAPSED: %s %s\n' % (progbase, elapsed_time))
+
+ log.write('\n')
+
+    # If we are only listing the tests, just add a newline; otherwise, if
+    # we printed a "Running tests in ..." line, append the test result.
+ if self.log:
+ if self.list_tests:
+        print('')
+ else:
+ if failed:
+ print(TextColors.FAILURE + 'FAILURE' + TextColors.ENDC)
+ else:
+ print(TextColors.SUCCESS + 'success' + TextColors.ENDC)
+
+ return failed
+
+
+def main():
+ try:
+ opts, args = my_getopt(sys.argv[1:], 'u:f:vc',
+ ['url=', 'fs-type=', 'verbose', 'cleanup',
+ 'http-library=', 'server-minor-version=',
+ 'fsfs-packing', 'fsfs-sharding=',
+ 'enable-sasl', 'parallel', 'config-file=',
+ 'log-to-stdout', 'list', 'milestone-filter=',
+ 'mode-filter='])
+ except getopt.GetoptError:
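+    # Fall through with no arguments so the usage text below is printed.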
+ args = []
+
+ if len(args) < 3:
+ print(__doc__)
+ sys.exit(2)
+
+ base_url, fs_type, verbose, cleanup, enable_sasl, http_library, \
+ server_minor_version, fsfs_sharding, fsfs_packing, parallel, \
+    config_file, log_to_stdout, list_tests, mode_filter, milestone_filter = \
+ None, None, None, None, None, None, None, None, None, None, None, \
+ None, None, None, None
+ for opt, val in opts:
+ if opt in ['-u', '--url']:
+ base_url = val
+ elif opt in ['-f', '--fs-type']:
+ fs_type = val
+ elif opt in ['--http-library']:
+ http_library = val
+ elif opt in ['--fsfs-sharding']:
+ fsfs_sharding = int(val)
+ elif opt in ['--fsfs-packing']:
+ fsfs_packing = 1
+ elif opt in ['--server-minor-version']:
+ server_minor_version = val
+ elif opt in ['-v', '--verbose']:
+ verbose = 1
+ elif opt in ['-c', '--cleanup']:
+ cleanup = 1
+ elif opt in ['--enable-sasl']:
+ enable_sasl = 1
+ elif opt in ['--parallel']:
+ parallel = 1
+ elif opt in ['--config-file']:
+ config_file = val
+ elif opt in ['--log-to-stdout']:
+ log_to_stdout = 1
+ elif opt in ['--list']:
+ list_tests = 1
+ elif opt in ['--milestone-filter']:
+ milestone_filter = val
+ elif opt in ['--mode-filter']:
+ mode_filter = val
+ else:
+ raise getopt.GetoptError
+
+ if log_to_stdout:
+ logfile = None
+ faillogfile = None
+ else:
+ logfile = os.path.abspath('tests.log')
+ faillogfile = os.path.abspath('fails.log')
+
+ th = TestHarness(args[0], args[1], logfile, faillogfile,
+ base_url, fs_type, http_library, server_minor_version,
+ verbose, cleanup, enable_sasl, parallel, config_file,
+ fsfs_sharding, fsfs_packing, list_tests,
+ mode_filter=mode_filter, milestone_filter=milestone_filter)
+
+ failed = th.run(args[2:])
+ if failed:
+ sys.exit(1)
+
+
+# Run main if not imported as a module
+if __name__ == '__main__':
+ main()
diff --git a/build/strip-po-charset.py b/build/strip-po-charset.py
new file mode 100755
index 0000000..43c5e5f
--- /dev/null
+++ b/build/strip-po-charset.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# strip-po-charset.py
+#
+
+import sys
+
+def strip_po_charset(inp, out):
+
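+    # Copy the .po content verbatim, dropping the Content-Type charset
+    # declaration line from the header.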
+ out.write(inp.read().replace("\"Content-Type: text/plain; charset=UTF-8\\n\"\n",""))
+
+def main():
+
+ if len(sys.argv) != 3:
+ print("Usage: %s <input (po) file> <output (spo) file>" % sys.argv[0])
+ print("")
+ print("Unsupported number of arguments; 2 required.")
+ sys.exit(1)
+
+ strip_po_charset(open(sys.argv[1],'r'), open(sys.argv[2],'w'))
+
+if __name__ == '__main__':
+ main()
diff --git a/build/transform_libtool_scripts.sh b/build/transform_libtool_scripts.sh
new file mode 100755
index 0000000..80c2af3
--- /dev/null
+++ b/build/transform_libtool_scripts.sh
@@ -0,0 +1,111 @@
+#!/bin/sh
+# DO NOT EDIT -- AUTOMATICALLY GENERATED
+
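+# transform SCRIPT LIBS: if SCRIPT sets LD_LIBRARY_PATH but not LD_PRELOAD,
+# insert an LD_PRELOAD of the listed in-tree libraries after its first four
+# lines so the freshly built libsvn libraries are picked up ahead of any
+# installed copies.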
+transform()
+{
+ SCRIPT="$1"
+ LIBS="$2"
+ if [ -f $SCRIPT ]; then
+ if grep LD_PRELOAD "$SCRIPT" > /dev/null; then
+ :
+ elif grep LD_LIBRARY_PATH "$SCRIPT" > /dev/null; then
+ echo "Transforming $SCRIPT"
+ EXISTINGLIBS=""
+ for LIB in $LIBS; do
+ # exclude libsvn_test since the undefined test_funcs breaks libtool
+ case $LIB in
+ *libsvn_test-*) continue ;;
+ esac
+ if [ ! -f $LIB ]; then
+ continue
+ fi
+ if [ -z "$EXISTINGLIBS" ]; then
+ EXISTINGLIBS="$LIB"
+ else
+ EXISTINGLIBS="$EXISTINGLIBS $LIB"
+ fi
+ done
+ if [ ! -z "$EXISTINGLIBS" ]; then
+ cat "$SCRIPT" |
+ (
+ read LINE
+ echo "$LINE"
+ read LINE
+ echo "$LINE"
+ read LINE
+ echo "$LINE"
+ read LINE
+ echo "$LINE"
+ echo "LD_PRELOAD=\"$EXISTINGLIBS\""
+ echo "export LD_PRELOAD"
+ cat
+ ) < "$SCRIPT" > "$SCRIPT.new"
+ mv -f "$SCRIPT.new" "$SCRIPT"
+ chmod +x "$SCRIPT"
+ fi
+ fi
+ fi
+}
+
+DIR=`pwd`
+
+transform subversion/tests/cmdline/atomic-ra-revprop-change "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/tests/libsvn_subr/auth-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/cache-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_fs_base/changes-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/checksum-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_client/client-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/libsvn_subr/compat-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/config-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_wc/db-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform tools/diff/diff "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/tests/libsvn_diff/diff-diff3-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform tools/diff/diff3 "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform tools/diff/diff4 "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/tests/libsvn_subr/dirent_uri-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_wc/entries-compat-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/cmdline/entries-dump "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/libsvn_subr/error-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_fs_base/fs-base-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_fs_fs/fs-pack-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_fs/fs-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/hashdump-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_fs/locks-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/mergeinfo-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_wc/op-depth-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/libsvn_subr/opt-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_diff/parse-diff-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/path-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_wc/pristine-store-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/libsvn_ra_local/ra-local-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_delta/random-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_repos/repos-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/revision-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/skel-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/stream-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/string-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_fs_base/strings-reps-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/subst_translate-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/svn/svn "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform tools/server-side/svn-populate-node-origins-index "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform tools/server-side/svn-rep-sharing-stats "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/svnadmin/svnadmin "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform tools/server-side/svnauthz-validate "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/tests/libsvn_delta/svndiff-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/svndumpfilter/svndumpfilter "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/svnlook/svnlook "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform tools/client-side/svnmucc/svnmucc "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform tools/dev/svnraisetreeconflict/svnraisetreeconflict "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/svnrdump/svnrdump "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/svnserve/svnserve "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/svnsync/svnsync "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/svnversion/svnversion "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/libsvn_subr/target-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/time-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_subr/translate-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_wc/tree-conflict-data-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_client/.libs/libsvn_client-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_ra/.libs/libsvn_ra-1.so $DIR/subversion/libsvn_ra_local/.libs/libsvn_ra_local-1.so $DIR/subversion/libsvn_ra_neon/.libs/libsvn_ra_neon-1.so $DIR/subversion/libsvn_ra_serf/.libs/libsvn_ra_serf-1.so $DIR/subversion/libsvn_ra_svn/.libs/libsvn_ra_svn-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/libsvn_subr/utf-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
+transform subversion/tests/libsvn_delta/vdelta-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so"
+transform subversion/tests/libsvn_wc/wc-incomplete-tester "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/libsvn_wc/wc-lock-tester "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_diff/.libs/libsvn_diff-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/libsvn_wc/.libs/libsvn_wc-1.so"
+transform subversion/tests/libsvn_delta/window-test "$DIR/subversion/libsvn_auth_gnome_keyring/.libs/libsvn_auth_gnome_keyring-1.so $DIR/subversion/libsvn_auth_kwallet/.libs/libsvn_auth_kwallet-1.so $DIR/subversion/libsvn_delta/.libs/libsvn_delta-1.so $DIR/subversion/libsvn_fs/.libs/libsvn_fs-1.so $DIR/subversion/libsvn_fs_base/.libs/libsvn_fs_base-1.so $DIR/subversion/libsvn_fs_fs/.libs/libsvn_fs_fs-1.so $DIR/subversion/libsvn_fs_util/.libs/libsvn_fs_util-1.so $DIR/subversion/libsvn_repos/.libs/libsvn_repos-1.so $DIR/subversion/libsvn_subr/.libs/libsvn_subr-1.so $DIR/subversion/tests/.libs/libsvn_test-1.so"
diff --git a/build/transform_sql.py b/build/transform_sql.py
new file mode 100755
index 0000000..12f3f6b
--- /dev/null
+++ b/build/transform_sql.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# transform_sql.py -- create a header file with the appropriate SQL variables
+# from an SQL file
+#
+
+
+import os
+import re
+import sys
+
+
+DEFINE_END = ' ""\n\n'
+
+
+def usage_and_exit(msg):
+ if msg:
+ sys.stderr.write('%s\n\n' % msg)
+ sys.stderr.write(
+ 'USAGE: %s SQLITE_FILE [OUTPUT_FILE]\n'
+ ' stdout will be used if OUTPUT_FILE is not provided.\n'
+ % os.path.basename(sys.argv[0]))
+ sys.stderr.flush()
+ sys.exit(1)
+
+
+class Processor(object):
+ re_comments = re.compile(r'/\*.*?\*/', re.MULTILINE|re.DOTALL)
+
+ # a few SQL comments that act as directives for this transform system
+ re_format = re.compile('-- *format: *([0-9]+)')
+ re_statement = re.compile('-- *STMT_([A-Z_0-9]+)')
+ re_include = re.compile('-- *include: *([-a-z]+)')
+ re_define = re.compile('-- *define: *([A-Z_0-9]+)')
+
+ def _sub_format(self, match):
+ vsn = match.group(1)
+
+ self.close_define()
+ self.output.write('#define %s_%s \\\n' % (self.var_name, match.group(1)))
+ self.var_printed = True
+
+ def _sub_statement(self, match):
+ name = match.group(1)
+
+ self.close_define()
+ self.output.write('#define STMT_%s %d\n' % (match.group(1),
+ self.stmt_count))
+ self.output.write('#define STMT_%d \\\n' % (self.stmt_count,))
+ self.var_printed = True
+
+ self.stmt_count += 1
+
+ def _sub_include(self, match):
+ filepath = os.path.join(self.dirpath, match.group(1) + '.sql')
+
+ self.close_define()
+ self.process_file(open(filepath).read())
+
+ def _sub_define(self, match):
+ define = match.group(1)
+
+ self.output.write(' APR_STRINGIFY(%s) \\\n' % define)
+
+ def __init__(self, dirpath, output, var_name):
+ self.dirpath = dirpath
+ self.output = output
+ self.var_name = var_name
+
+ self.stmt_count = 0
+ self.var_printed = False
+
+ self._directives = {
+ self.re_format : self._sub_format,
+ self.re_statement : self._sub_statement,
+ self.re_include : self._sub_include,
+ self.re_define : self._sub_define,
+ }
+
+ def process_file(self, input):
+ input = self.re_comments.sub('', input)
+
+ for line in input.split('\n'):
+ line = line.replace('"', '\\"')
+ line = re.sub(r'IS_STRICT_DESCENDANT_OF[(]([A-Za-z_.]+), ([?][0-9]+)[)]',
+ r"((\1) > (\2) || '/' AND (\1) < (\2) || '0') ",
+ line)
+
+ if line.strip():
+ handled = False
+
+ for regex, handler in self._directives.iteritems():
+ match = regex.match(line)
+ if match:
+ handler(match)
+ handled = True
+ break
+
+ # we've handled the line, so skip it
+ if handled:
+ continue
+
+ if not self.var_printed:
+ self.output.write('#define %s \\\n' % self.var_name)
+ self.var_printed = True
+
+ # got something besides whitespace. write it out. include some whitespace
+ # to separate the SQL commands. and a backslash to continue the string
+ # onto the next line.
+ self.output.write(' "%s " \\\n' % line.rstrip())
+
+ # previous line had a continuation. end the madness.
+ self.close_define()
+
+ def close_define(self):
+ if self.var_printed:
+ self.output.write(DEFINE_END)
+ self.var_printed = False
+
+
+def main(input_filepath, output):
+ filename = os.path.basename(input_filepath)
+ input = open(input_filepath, 'r').read()
+
+ var_name = re.sub('[-.]', '_', filename).upper()
+
+ output.write(
+ '/* This file is automatically generated from %s.\n'
+ ' * Do not edit this file -- edit the source and rerun gen-make.py */\n'
+ '\n'
+ % (filename,))
+
+ proc = Processor(os.path.dirname(input_filepath), output, var_name)
+ proc.process_file(input)
+
+ ### the STMT_%d naming precludes *multiple* transform_sql headers from
+ ### being used within the same .c file. for now, that's more than fine.
+ ### in the future, we can always add a var_name discriminator or use
+ ### the statement name itself (which should hopefully be unique across
+ ### all names in use; or can easily be made so)
+ if proc.stmt_count > 0:
+ output.write(
+ '#define %s_DECLARE_STATEMENTS(varname) \\\n' % (var_name,)
+ + ' static const char * const varname[] = { \\\n'
+ + ', \\\n'.join(' STMT_%d' % (i,) for i in range(proc.stmt_count))
+ + ', \\\n NULL \\\n }\n')
+
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2 or len(sys.argv) > 3:
+ usage_and_exit('Incorrect number of arguments')
+
+ # Note: we could use stdin, but then we'd have no var_name
+ input_filepath = sys.argv[1]
+
+ if len(sys.argv) > 2:
+ output_file = open(sys.argv[2], 'w')
+ else:
+ output_file = sys.stdout
+
+ main(input_filepath, output_file)
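
For illustration only (not part of the patch), the following hypothetical driver shows roughly what transform_sql.py emits for a toy input. The statement names and SQL are invented, the script path assumes the snippet is run from the top of the source tree, and a Python 2 interpreter is needed because the script itself uses dict.iteritems:

    import os
    import shutil
    import subprocess
    import tempfile

    # Two made-up statements plus a C-style comment, which the script strips.
    sql = (
        "/* this comment is removed entirely */\n"
        "-- STMT_SELECT_NODE\n"
        "SELECT id FROM nodes WHERE wc_id = ?1;\n"
        "\n"
        "-- STMT_DELETE_NODE\n"
        "DELETE FROM nodes WHERE wc_id = ?1;\n"
    )

    tmpdir = tempfile.mkdtemp()
    path = os.path.join(tmpdir, 'wc-queries.sql')
    with open(path, 'w') as f:
        f.write(sql)
    try:
        # No OUTPUT_FILE argument, so the generated header goes to stdout.
        # 'python' must point at a Python 2 interpreter here.
        subprocess.call(['python', 'build/transform_sql.py', path])
    finally:
        shutil.rmtree(tmpdir)

    # Expected output, roughly:
    #
    #   /* This file is automatically generated from wc-queries.sql. ... */
    #
    #   #define STMT_SELECT_NODE 0
    #   #define STMT_0 \
    #     "SELECT id FROM nodes WHERE wc_id = ?1; " \
    #    ""
    #
    #   #define STMT_DELETE_NODE 1
    #   #define STMT_1 \
    #     "DELETE FROM nodes WHERE wc_id = ?1; " \
    #    ""
    #
    #   #define WC_QUERIES_SQL_DECLARE_STATEMENTS(varname) \
    #     static const char * const varname[] = { \
    #     STMT_0, \
    #     STMT_1, \
    #     NULL \
    #     }
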
diff --git a/build/win32/empty.c b/build/win32/empty.c
new file mode 100644
index 0000000..859dff2
--- /dev/null
+++ b/build/win32/empty.c
@@ -0,0 +1,2 @@
+/* This file is added to every libsvn_foo_dll project because Visual Studio
+ won't build the project's libraries if it has no source files. */
diff --git a/build/win32/make_dist.conf.template b/build/win32/make_dist.conf.template
new file mode 100644
index 0000000..262a65e
--- /dev/null
+++ b/build/win32/make_dist.conf.template
@@ -0,0 +1,29 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+[DEFAULT]
+aprrel = Release
+aprxml = xml/expat/lib/LibR
+svnrel = Release
+bdbver = 43
+
+[tools]
+doxygen = doxygen.exe
+nmake = C:/Program Files/Microsoft Visual Studio/VC98/Bin/NMAKE.EXE
+jar = C:/Program Files/Java/jdk1.5.0_04/bin/jar.exe
+zip = zip.exe
+ruby = C:/Program Files/ruby/bin/ruby.exe
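
make_dist.py, added next, apparently expects a filled-in copy of this template named make_dist.conf beside the script: it loads these values as ConfigParser defaults, adds the package directories recorded in gen-make.opts under '@'-prefixed keys, and then resolves the %(...)s references used throughout its distribution tree. A minimal sketch of that expansion (the paths and values below are invented stand-ins, not part of the patch):

    try:
        import configparser                  # Python 3
    except ImportError:
        import ConfigParser as configparser  # Python 2

    cfg = configparser.ConfigParser()
    cfg.set('DEFAULT', 'svnrel', 'Release')
    cfg.set('DEFAULT', 'aprrel', 'Release')
    cfg.set('DEFAULT', '@apr', 'C:/SVN/apr')   # would come from gen-make.opts --with-apr
    cfg.set('DEFAULT', 'blddir', 'C:/SVN/src-trunk/%(svnrel)s/subversion')

    # Action._expand writes the raw value into a scratch section and reads it
    # back, letting ConfigParser perform the (recursive) %(...)s interpolation.
    cfg.add_section('__expand__')
    cfg.set('__expand__', '__expand__', '%(@apr)s/%(aprrel)s/libapr-1.dll')
    print(cfg.get('__expand__', '__expand__'))
    # -> C:/SVN/apr/Release/libapr-1.dll
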
diff --git a/build/win32/make_dist.py b/build/win32/make_dist.py
new file mode 100644
index 0000000..2c6cae6
--- /dev/null
+++ b/build/win32/make_dist.py
@@ -0,0 +1,513 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+import os
+import sys
+import shutil
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+import glob
+import traceback
+try:
+ # Python >=3.0
+ import configparser
+except ImportError:
+ # Python <3.0
+ import ConfigParser as configparser
+
+# The script directory and the source base directory
+_scriptdir = os.path.dirname(sys.argv[0])
+_srcdir = os.path.join(_scriptdir, '..', '..')
+_distname = None
+_distdir = None
+_readme = None
+
+
+_stdout = sys.stdout
+_stderr = sys.stderr
+
+_logname = os.path.abspath(os.path.join(_scriptdir, 'make_dist.log'))
+_logfile = open(_logname, 'w')
+sys.stdout = _logfile
+sys.stderr = _logfile
+
+def _exit(code=0):
+ if code:
+ _stderr.write('make_dist: Exit %d\n' % (code,))
+ sys.exit(code)
+
+
+# Action classes
+
+class MissingMethodImpl:
+ pass
+
+class Action:
+ def run(self, dir, cfg):
+ raise MissingMethodImpl()
+
+ def _expand(self, cfg, value):
+ cfg.set('__expand__', '__expand__', value)
+ return cfg.get('__expand__', '__expand__')
+
+ def _safe_expand(self, cfg, value):
+ try:
+ return self._expand(cfg, value)
+ except:
+ return None
+
+ def _copy_file(self, source, target):
+ print('copy: %s' % source)
+ print(' to: %s' % target)
+ shutil.copyfile(source, target)
+
+class File(Action):
+ def __init__(self, path, name=None):
+ self.path = path
+ self.name = name
+
+ def run(self, dir, cfg):
+ path = self._expand(cfg, self.path)
+ if self.name is None:
+ name = os.path.basename(path)
+ else:
+ name = self.name
+ self._copy_file(path, os.path.join(dir, name))
+
+class OptFile(Action):
+ def __init__(self, path, name=None):
+ self.path = path
+ self.name = name
+
+ def run(self, dir, cfg):
+ path = self._safe_expand(cfg, self.path)
+ if path is None or not os.path.isfile(path):
+ print('make_dist: File not found: %s' % self.path)
+ return
+ if self.name is None:
+ name = os.path.basename(path)
+ else:
+ name = self.name
+ self._copy_file(path, os.path.join(dir, name))
+
+class FileGlob(Action):
+ def __init__(self, pattern):
+ self.pattern = pattern
+
+ def run(self, dir, cfg):
+ pattern = self._expand(cfg, self.pattern)
+ for source in glob.glob(pattern):
+ self._copy_file(source, os.path.join(dir, os.path.basename(source)))
+
+class InstallDocs(Action):
+ def __init__(self, config, path):
+ self.config = config
+ self.path = path
+
+ def run(self, dir, cfg):
+ config = self._expand(cfg, self.config)
+ pattern = os.path.join(self._expand(cfg, self.path), '*.*')
+ print('make_dist: Generating documentation')
+ old_cwd = os.getcwd()
+ try:
+ os.chdir(_srcdir)
+ _system('"%s" "%s"' % (cfg.get('tools', 'doxygen'), config))
+ os.chdir(old_cwd)
+ FileGlob(pattern).run(dir, cfg)
+ except:
+ os.chdir(old_cwd)
+ raise
+ else:
+ os.chdir(old_cwd)
+
+class InstallIconv(Action):
+ def __init__(self, source, build_mode):
+ self.source = source
+ self.build_mode = build_mode
+
+ def run(self, dir, cfg):
+ source = os.path.abspath(self._expand(cfg, self.source))
+ build_mode = self._expand(cfg, self.build_mode)
+ print('make_dist: Installing apr-iconv modules')
+ install = ('"%s" -nologo -f Makefile.win install'
+ + ' INSTALL_DIR="%s"'
+ + ' BUILD_MODE=%s BIND_MODE=%s') \
+ % (cfg.get('tools', 'nmake'),
+ os.path.abspath(dir),
+ build_mode,
+ 'shared')
+ old_cwd = os.getcwd()
+ try:
+ os.chdir(os.path.join(source, 'ccs'))
+ _system(install)
+ os.chdir(os.path.join(source, 'ces'))
+ _system(install)
+ except:
+ os.chdir(old_cwd)
+ raise
+ else:
+ os.chdir(old_cwd)
+
+class InstallJar(Action):
+ def __init__(self, jar, source):
+ self.jar = jar
+ self.source = source
+
+ def run(self, dir, cfg):
+ source = os.path.abspath(self._expand(cfg, self.source))
+ jarfile = os.path.abspath(os.path.join(dir, self.jar))
+ print('make_dist: Creating jar %s' % self.jar)
+ _system('"%s" cvf "%s" -C "%s" .'
+ % (cfg.get('tools', 'jar'), jarfile, source))
+
+class InstallMoFiles(Action):
+ def __init__(self, source):
+ self.source = source
+
+ def run(self, dir, cfg):
+ pattern = os.path.join(self._expand(cfg, self.source), '*.mo')
+ for mofile in glob.glob(pattern):
+ localedir = os.path.join(dir, os.path.basename(mofile)[:-3],
+ 'LC_MESSAGES')
+ os.makedirs(localedir)
+ self._copy_file(mofile, os.path.join(localedir, 'subversion.mo'))
+
+# This is the distribution tree
+_disttree = {'': OptFile('%(readme)s', 'README.txt'),
+
+ 'bin': (File('%(blddir)s/svn/svn.exe'),
+ File('%(blddir)s/svn/svn.pdb'),
+ File('%(blddir)s/svnsync/svnsync.pdb'),
+ File('%(blddir)s/svnsync/svnsync.exe'),
+ File('%(blddir)s/svnadmin/svnadmin.exe'),
+ File('%(blddir)s/svnadmin/svnadmin.pdb'),
+ File('%(blddir)s/svnlook/svnlook.exe'),
+ File('%(blddir)s/svnlook/svnlook.pdb'),
+ File('%(blddir)s/svndumpfilter/svndumpfilter.exe'),
+ File('%(blddir)s/svndumpfilter/svndumpfilter.pdb'),
+ File('%(blddir)s/svnserve/svnserve.exe'),
+ File('%(blddir)s/svnserve/svnserve.pdb'),
+ File('%(blddir)s/svnversion/svnversion.exe'),
+ File('%(blddir)s/svnversion/svnversion.pdb'),
+ File('%(blddir)s/svnrdump/svnrdump.exe'),
+ File('%(blddir)s/svnrdump/svnrdump.pdb'),
+ File('%(blddir)s/../contrib/client-side/svn-push/svn-push.exe'),
+ File('%(blddir)s/../contrib/client-side/svn-push/svn-push.pdb'),
+ File('%(blddir)s/../tools/client-side/svnmucc/svnmucc.exe'),
+ File('%(blddir)s/../tools/client-side/svnmucc/svnmucc.pdb'),
+ File('%(blddir)s/../tools/server-side/svnauthz-validate.exe'),
+ File('%(blddir)s/../tools/server-side/svnauthz-validate.pdb'),
+ File('%(blddir)s/../tools/server-side/svn-populate-node-origins-index.exe'),
+ File('%(blddir)s/../tools/server-side/svn-populate-node-origins-index.pdb'),
+ File('%(blddir)s/../tools/dev/svnraisetreeconflict/svnraisetreeconflict.exe'),
+ File('%(blddir)s/../tools/dev/svnraisetreeconflict/svnraisetreeconflict.pdb'),
+ File('%(blddir)s/mod_dav_svn/mod_dav_svn.so'),
+ File('%(blddir)s/mod_dav_svn/mod_dav_svn.pdb'),
+ File('%(blddir)s/mod_authz_svn/mod_authz_svn.so'),
+ File('%(blddir)s/mod_authz_svn/mod_authz_svn.pdb'),
+ FileGlob('%(blddir)s/libsvn_*/libsvn_*.dll'),
+ FileGlob('%(blddir)s/libsvn_*/libsvn_*.pdb'),
+ File('%(@apr)s/%(aprrel)s/libapr-1.dll'),
+ File('%(@apr)s/%(aprrel)s/libapr-1.pdb'),
+ File('%(@apr-iconv)s/%(aprrel)s/libapriconv-1.dll'),
+ File('%(@apr-iconv)s/%(aprrel)s/libapriconv-1.pdb'),
+ File('%(@apr-util)s/%(aprrel)s/libaprutil-1.dll'),
+ File('%(@apr-util)s/%(aprrel)s/libaprutil-1.pdb'),
+ File('%(@berkeley-db)s/bin/libdb%(bdbver)s.dll'),
+ File('%(@sasl)s/lib/libsasl.dll'),
+ File('%(@sasl)s/lib/libsasl.pdb'),
+ File('%(@sasl)s/utils/pluginviewer.exe'),
+ File('%(@sasl)s/utils/pluginviewer.pdb'),
+ File('%(@sasl)s/utils/sasldblistusers2.exe'),
+ File('%(@sasl)s/utils/sasldblistusers2.pdb'),
+ File('%(@sasl)s/utils/saslpasswd2.exe'),
+ File('%(@sasl)s/utils/saslpasswd2.pdb'),
+ OptFile('%(@berkeley-db)s/bin/libdb%(bdbver)s.pdb'),
+ OptFile('%(@sqlite)s/bin/sqlite3.dll'),
+ OptFile('%(@openssl)s/out32dll/libeay32.dll'),
+ OptFile('%(@openssl)s/out32dll/libeay32.pdb'),
+ OptFile('%(@openssl)s/out32dll/ssleay32.dll'),
+ OptFile('%(@openssl)s/out32dll/ssleay32.pdb'),
+ OptFile('%(@openssl)s/out32dll/openssl.exe'),
+ OptFile('%(@libintl)s/bin/intl3_svn.dll'),
+ OptFile('%(@libintl)s/bin/intl3_svn.pdb'),
+ FileGlob('%(@sasl)s/plugins/sasl*.dll'),
+ FileGlob('%(@sasl)s/plugins/sasl*.pdb'),
+ ),
+
+ 'doc': InstallDocs('%(srcdir)s/doc/doxygen.conf',
+ '%(srcdir)s/doc/doxygen/html'),
+
+ 'iconv': InstallIconv('%(@apr-iconv)s', '%(aprrel)s'),
+
+ 'include': FileGlob('%(svndir)s/include/*.h'),
+ 'include/apr': FileGlob('%(@apr)s/include/*.h'),
+ 'include/apr-iconv': FileGlob('%(@apr-iconv)s/include/*.h'),
+ 'include/apr-util': FileGlob('%(@apr-util)s/include/*.h'),
+
+ 'lib': (FileGlob('%(blddir)s/libsvn_*/*.lib'),
+ FileGlob('%(blddir)s/libsvn_*/*.pdb')),
+ 'lib/apr': File('%(@apr)s/%(aprrel)s/libapr-1.lib'),
+ 'lib/apr-iconv': File('%(@apr-iconv)s/%(aprrel)s/libapriconv-1.lib'),
+ 'lib/apr-util': (File('%(@apr-util)s/%(aprrel)s/libaprutil-1.lib'),
+ File('%(@apr-util)s/%(aprxml)s/xml.lib'),
+ File('%(@apr-util)s/%(aprxml)s/xml.pdb'),
+ ),
+ 'lib/neon': (File('%(@neon)s/libneon.lib'),
+ OptFile('%(@zlib)s/zlibstat.lib'),
+ ),
+
+ 'lib/serf': (File('%(@serf)s/Release/serf.lib'),
+ ),
+
+ 'lib/sasl': (File('%(@sasl)s/lib/libsasl.lib'),
+ File('%(@sasl)s/lib/libsasl.pdb'),
+ ),
+
+ 'licenses': None,
+ 'licenses/bdb': File('%(@berkeley-db)s/LICENSE'),
+ 'licenses/neon': File('%(@neon)s/src/COPYING.LIB'),
+ 'licenses/serf': File('%(@serf)s/LICENSE'),
+ 'licenses/zlib': File('%(@zlib)s/README'),
+ 'licenses/apr-util': (File('%(@apr-util)s/LICENSE'),
+ File('%(@apr-util)s/NOTICE'),
+ ),
+ 'licenses/apr-iconv': (File('%(@apr-iconv)s/LICENSE'),
+ File('%(@apr-iconv)s/NOTICE'),
+ ),
+ 'licenses/apr': (File('%(@apr)s/LICENSE'),
+ File('%(@apr)s/NOTICE'),
+ ),
+ 'licenses/openssl': File('%(@openssl)s/LICENSE'),
+ 'licenses/svn' : File('%(srcdir)s/COPYING'),
+ 'licenses/cyrus-sasl' : File('%(@sasl)s/COPYING'),
+
+ 'perl': None,
+ 'perl/site': None,
+ 'perl/site/lib': None,
+ 'perl/site/lib/SVN': FileGlob('%(bindsrc)s/swig/perl/native/*.pm'),
+ 'perl/site/lib/auto': None,
+ 'perl/site/lib/auto/SVN': None,
+ # Perl module DLLs defined below
+
+ 'python': None,
+ 'python/libsvn': (FileGlob('%(binddir)s/swig/python/libsvn_swig_py/*.dll'),
+ FileGlob('%(binddir)s/swig/python/libsvn_swig_py/*.pdb'),
+ FileGlob('%(bindsrc)s/swig/python/*.py'),
+ FileGlob('%(binddir)s/swig/python/*.dll'),
+ FileGlob('%(binddir)s/swig/python/*.pdb'),
+ ),
+ 'python/svn': FileGlob('%(bindsrc)s/swig/python/svn/*.py'),
+
+ 'javahl': (FileGlob('%(binddir)s/javahl/native/libsvn*.dll'),
+ FileGlob('%(binddir)s/javahl/native/libsvn*.pdb'),
+ InstallJar('svnjavahl.jar',
+ '%(bindsrc)s/javahl/classes'),
+ ),
+
+ 'ruby': None,
+ 'ruby/lib': None,
+ 'ruby/lib/svn': FileGlob('%(bindsrc)s/swig/ruby/svn/*.rb'),
+ 'ruby/ext': None,
+ 'ruby/ext/svn': None,
+ 'ruby/ext/svn/ext':
+ (FileGlob('%(binddir)s/swig/ruby/*.dll'),
+ FileGlob('%(binddir)s/swig/ruby/*.pdb'),
+ FileGlob('%(binddir)s/swig/ruby/libsvn_swig_ruby/*.dll'),
+ FileGlob('%(binddir)s/swig/ruby/libsvn_swig_ruby/*.pdb'),
+ FileGlob('%(blddir)s/libsvn_*/*.dll'),
+ File('%(@berkeley-db)s/bin/libdb%(bdbver)s.dll'),
+ OptFile('%(@sqlite)s/bin/sqlite3.dll'),
+ OptFile('%(@libintl)s/bin/intl3_svn.dll'),
+ File('%(@apr)s/%(aprrel)s/libapr-1.dll'),
+ File('%(@apr-iconv)s/%(aprrel)s/libapriconv-1.dll'),
+ File('%(@apr-util)s/%(aprrel)s/libaprutil-1.dll')),
+
+ 'share': None,
+ 'share/locale': InstallMoFiles('%(srcdir)s/%(svnrel)s/mo'),
+ }
+
+# Define Perl module DLLs
+for module in ('Client', 'Core', 'Delta', 'Fs', 'Ra', 'Repos', 'Wc'):
+ _disttree['perl/site/lib/auto/SVN/_' + module] = (
+ File('%(binddir)s/swig/perl/native/_' + module + '.dll'),
+ File('%(binddir)s/swig/perl/native/_' + module + '.pdb'))
+
+def _system(command):
+ def reopen_log():
+ global _logfile
+ _logfile = open(_logname, 'a')
+ sys.stdout = _logfile
+ sys.stderr = _logfile
+ try:
+ _logfile.close()
+ sys.stdout = _stdout
+ sys.stderr = _stderr
+ os.system('"%s >>%s 2>&1"' % (command, _logname))
+ except:
+ reopen_log()
+ raise
+ else:
+ reopen_log()
+
+
+def _read_config():
+ # Read make_dist.conf first. Fill in the default package locations.
+ path_defaults = {'@berkeley-db':
+ os.path.abspath(os.path.join(_srcdir, 'db4-win32')),
+ '@apr':
+ os.path.abspath(os.path.join(_srcdir, 'apr')),
+ '@apr-iconv':
+ os.path.abspath(os.path.join(_srcdir, 'apr-iconv')),
+ '@apr-util':
+ os.path.abspath(os.path.join(_srcdir, 'apr-util')),
+ '@neon':
+ os.path.abspath(os.path.join(_srcdir, 'neon')),
+ }
+
+ cfg = configparser.ConfigParser(path_defaults)
+ try:
+ cfg.readfp(open(os.path.join(_scriptdir, 'make_dist.conf'), 'r'))
+ except:
+ _stderr.write('Unable to open and read make_dist.conf\n')
+ _exit(1)
+
+ # Read the options config generated by gen-make.py
+ optcfg = configparser.ConfigParser()
+ optcfg.readfp(open(os.path.join(_srcdir, 'gen-make.opts'), 'r'))
+
+ # Move the runtime options into the DEFAULT section
+ for opt in optcfg.options('options'):
+ if not opt[:7] == '--with-':
+ continue
+ optdir = os.path.abspath(os.path.join(_srcdir, optcfg.get('options', opt)))
+ if not os.path.isdir(optdir):
+ print('make_dist: %s = %s' % (opt, optdir))
+ print('make_dist: Target is not a directory')
+ _exit(1)
+ cfg.set('DEFAULT', '@' + opt[7:], optdir)
+
+ # Also add the global parameters to the defaults
+ cfg.set('DEFAULT', 'srcdir', os.path.abspath(_srcdir))
+ cfg.set('DEFAULT', 'blddir', os.path.join(_srcdir,
+ '%(svnrel)s', 'subversion'))
+ cfg.set('DEFAULT', 'svndir', os.path.join(_srcdir, 'subversion'))
+ cfg.set('DEFAULT', 'binddir', '%(blddir)s/bindings')
+ cfg.set('DEFAULT', 'bindsrc', '%(svndir)s/bindings')
+
+
+ if _distname is not None:
+ cfg.set('DEFAULT', 'distname', os.path.abspath(_distname))
+ if _distdir is not None:
+ cfg.set('DEFAULT', 'distdir', os.path.abspath(_distdir))
+ if _readme is not None:
+ cfg.set('DEFAULT', 'readme', os.path.abspath(_readme))
+
+ return cfg
+
+
+def _make_zip(suffix, pathlist, extras):
+ zipname = '%s%s.zip' % (_distname, suffix)
+ zipcmd = '"%s" -9 -r "%s"' % (cfg.get('tools', 'zip'), zipname)
+ for path in pathlist:
+ zipcmd = zipcmd + ' "' + _distname + path + '"'
+ if extras:
+ zipcmd = zipcmd + ' ' + extras
+ old_cwd = os.getcwd()
+ try:
+ os.chdir(_distdir)
+ if os.path.exists(zipname):
+ os.remove(zipname)
+ print('make_dist: Creating %s' % zipname)
+ _stdout.write('make_dist: Creating %s\n' % zipname)
+ _system(zipcmd)
+ except:
+ os.chdir(old_cwd)
+ raise
+ else:
+ os.chdir(old_cwd)
+
+
+def _make_dist(cfg):
+ try:
+ cfg.add_section('__expand__')
+ distdir = os.path.abspath(os.path.join(_distdir, _distname))
+ if os.path.isdir(distdir):
+ shutil.rmtree(distdir)
+ os.makedirs(distdir)
+
+ dirlist = sorted(_disttree.keys())
+
+ for reldir in dirlist:
+ dir = os.path.join(distdir, reldir)
+ if not os.path.exists(dir):
+ print('make_dist: Creating directory %s' % reldir)
+ _stdout.write('make_dist: Creating directory %s\n' % reldir)
+ os.makedirs(dir)
+ action = _disttree[reldir]
+ if action is None:
+ continue
+ if isinstance(action, tuple):
+ for subaction in action:
+ subaction.run(dir, cfg)
+ else:
+ action.run(dir, cfg)
+
+ xpdb = '-x "*.pdb"'
+ _make_zip('', ('/README.txt', '/bin', '/httpd',
+ '/iconv', '/licenses', '/share/locale'), xpdb)
+ _make_zip('_dev', ('/README.txt', '/doc', '/include', '/lib'), xpdb)
+ _make_zip('_javahl', ('/README.txt', '/javahl'), xpdb)
+ _make_zip('_pdb', ('',), '-i "*.pdb"')
+ _make_zip('_pl', ('/README.txt', '/perl'), xpdb)
+ _make_zip('_py', ('/README.txt', '/python'), xpdb)
+ _make_zip('_rb', ('/README.txt', '/ruby', '/licenses', '/share/locale'),
+ xpdb)
+
+ _stdout.write('make_dist: Creating ruby gem\n')
+ gem_script = os.path.join(_scriptdir, 'make_gem.rb')
+ rubycmd = '"%s" "%s" --output-dir="%s"' % (cfg.get('tools', 'ruby'),
+ gem_script, _distdir)
+ rubycmd += ' "' + distdir + '\\README.txt"'
+ rubycmd += ' "' + distdir + '\\ruby"'
+ rubycmd += ' "' + distdir + '\\licenses"'
+ rubycmd += ' "' + distdir + '\\share"'
+ _system(rubycmd)
+ except:
+ traceback.print_exc(None, _stderr)
+ _exit(1)
+
+
+if __name__ == '__main__':
+ opts, args = my_getopt(sys.argv[1:], '', ['readme='])
+ if len(args) != 2 or len(opts) > 1:
+ _stderr.write('Usage: make_dist.py [--readme=<file>] <distname> <distdir>\n')
+ _exit(2)
+
+ _distname, _distdir = args
+
+ if len(opts) != 0:
+ _readme = opts[0][1]
+
+ cfg = _read_config()
+ _make_dist(cfg)
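
As a usage sketch (the version number and directory names here are invented): once gen-make.py and the Visual Studio build have produced the Release tree and gen-make.opts, and make_dist.conf has been filled in as described above, one would run something like "python make_dist.py --readme=C:\SVN\README.txt svn-win32-1.7.0 C:\dist" from build\win32. Progress messages appear on the console while the full tool output goes to make_dist.log next to the script; the result is the svn-win32-1.7.0 tree under C:\dist together with the main zip, the _dev, _javahl, _pdb, _pl, _py and _rb variants, and a Ruby gem built by make_gem.rb (added next).
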
diff --git a/build/win32/make_gem.rb b/build/win32/make_gem.rb
new file mode 100644
index 0000000..b438eac
--- /dev/null
+++ b/build/win32/make_gem.rb
@@ -0,0 +1,96 @@
+#!/usr/bin/env ruby
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+require 'optparse'
+require 'ostruct'
+require 'tmpdir'
+require 'fileutils'
+
+options = OpenStruct.new
+options.output_dir = File.expand_path(Dir.pwd)
+
+opts = OptionParser.new do |opts|
+ opts.banner += " DIRECTORIES"
+ opts.on("-oDIRECTORY", "--output-dir=DIRECTORY",
+ "Output generated gem to DIRECTORY",
+ "[#{options.output_dir}]") do |dir|
+ options.output_dir = File.expand_path(dir)
+ end
+
+ opts.separator ""
+
+ opts.on("-h", "--help", "Show this message") do
+ puts opts
+ exit
+ end
+end
+
+target_dirs = opts.parse!(ARGV)
+if target_dirs.empty?
+ puts opts
+ exit 1
+end
+
+target_dirs.each do |dir|
+ next unless File.basename(dir) == "ruby"
+ base_dir = File.expand_path(dir)
+ $LOAD_PATH.unshift(File.join(base_dir, "ext"))
+ $LOAD_PATH.unshift(File.join(base_dir, "lib"))
+end
+
+require 'svn/core'
+
+
+archive_dir = File.join(Dir.tmpdir, "svn-ruby-gem-#{Process.pid}")
+FileUtils.mkdir(archive_dir)
+at_exit {FileUtils.rm_rf(archive_dir)}
+
+target_dirs.each do |dir|
+ FileUtils.cp_r(dir, archive_dir)
+end
+
+
+generated_gem_file = nil
+Dir.chdir(archive_dir) do
+ require 'rubygems'
+ Gem.manage_gems
+
+ spec = Gem::Specification.new do |s|
+ s.name = "subversion"
+ s.date = Time.now
+ s.version = Svn::Core::VER_NUM
+ s.summary = "The Ruby bindings for Subversion."
+ s.email = "dev@subversion.apache.org"
+ s.homepage = "http://subversion.apache.org/"
+ s.description = s.summary
+ s.authors = ["Kouhei Sutou"]
+ s.files = Dir.glob(File.join("**", "*")).delete_if {|x| /\.gem$/i =~ x}
+ s.require_paths = ["ruby/ext", "ruby/lib"]
+ s.platform = Gem::Platform::WIN32
+ s.required_ruby_version = '>= 1.8.2'
+ end
+
+ generated_gem_file = File.expand_path(Gem::Builder.new(spec).build)
+end
+
+gem_file = File.join(options.output_dir, File.basename(generated_gem_file))
+FileUtils.mv(generated_gem_file, gem_file)
diff --git a/build/win32/svn.ico b/build/win32/svn.ico
new file mode 100644
index 0000000..73e95f7
--- /dev/null
+++ b/build/win32/svn.ico
Binary files differ
diff --git a/build/win32/svn.rc b/build/win32/svn.rc
new file mode 100644
index 0000000..b48021c
--- /dev/null
+++ b/build/win32/svn.rc
@@ -0,0 +1,82 @@
+/* svn.rc: Resource definitions for Subversion programs
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#ifdef APSTUDIO_INVOKED
+# error This file was not generated by Visual Studio.
+#endif
+
+#include <winresrc.h>
+#include <apr_general.h>
+#include "../../subversion/include/svn_version.h"
+
+#ifndef _DEBUG
+# if SVN_VER_REVISION == 0
+# define SVN_FILEFLAGS VS_FF_PRERELEASE
+# define SVN_SPECIALBUILD SVN_VER_TAG
+# else
+# define SVN_FILEFLAGS 0x0L
+# undef SVN_SPECIALBUILD
+# endif
+#else /* _DEBUG */
+# if SVN_VER_REVISION == 0
+# define SVN_FILEFLAGS VS_FF_DEBUG|VS_FF_PRERELEASE
+# define SVN_SPECIALBUILD SVN_VER_TAG " (debug)"
+# else
+# define SVN_FILEFLAGS VS_FF_DEBUG
+# define SVN_SPECIALBUILD "debug build"
+# endif
+#endif /* _DEBUG */
+
+
+1 ICON "svn.ico"
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION SVN_VER_MAJOR,SVN_VER_MINOR,SVN_VER_PATCH,SVN_VER_REVISION
+ PRODUCTVERSION SVN_VER_MAJOR,SVN_VER_MINOR,SVN_VER_PATCH,SVN_VER_REVISION
+ FILEFLAGSMASK VS_FFI_FILEFLAGSMASK
+ FILEFLAGS SVN_FILEFLAGS
+ FILEOS VOS__WINDOWS32
+ FILETYPE VFT_APP
+ FILESUBTYPE VFT2_UNKNOWN
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904B0"
+ BEGIN
+ VALUE "CompanyName", "http://subversion.apache.org/\0"
+ VALUE "FileDescription", APR_STRINGIFY(SVN_FILE_DESCRIPTION) "\0"
+ VALUE "FileVersion", SVN_VER_NUMBER "\0"
+ VALUE "InternalName", "SVN\0"
+ VALUE "LegalCopyright", "Copyright (c) 2011 The Apache Software Foundation\0"
+ VALUE "OriginalFilename", APR_STRINGIFY(SVN_FILE_NAME) "\0"
+ VALUE "ProductName", "Subversion\0"
+ VALUE "ProductVersion", SVN_VERSION "\0"
+#ifdef SVN_SPECIALBUILD
+ VALUE "SpecialBuild", SVN_SPECIALBUILD "\0"
+#endif
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
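
For readers not used to preprocessor-driven .rc files, the conditionals above boil down to four cases: a release (non-_DEBUG) build with a non-zero SVN_VER_REVISION gets FILEFLAGS 0x0 and no SpecialBuild string; with SVN_VER_REVISION of zero it is marked VS_FF_PRERELEASE and SpecialBuild carries SVN_VER_TAG. The _DEBUG builds additionally set VS_FF_DEBUG and always define SVN_SPECIALBUILD, as SVN_VER_TAG " (debug)" when the revision is zero or "debug build" otherwise; the StringFileInfo block only emits the SpecialBuild value when SVN_SPECIALBUILD is defined.
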
diff --git a/build/win32/vc6-build.bat.in b/build/win32/vc6-build.bat.in
new file mode 100644
index 0000000..93294c0
--- /dev/null
+++ b/build/win32/vc6-build.bat.in
@@ -0,0 +1,179 @@
+@echo off
+@REM Licensed to the Apache Software Foundation (ASF) under one
+@REM or more contributor license agreements. See the NOTICE file
+@REM distributed with this work for additional information
+@REM regarding copyright ownership. The ASF licenses this file
+@REM to you under the Apache License, Version 2.0 (the
+@REM "License"); you may not use this file except in compliance
+@REM with the License. You may obtain a copy of the License at
+@REM
+@REM http://www.apache.org/licenses/LICENSE-2.0
+@REM
+@REM Unless required by applicable law or agreed to in writing,
+@REM software distributed under the License is distributed on an
+@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@REM KIND, either express or implied. See the License for the
+@REM specific language governing permissions and limitations
+@REM under the License.
+
+rem ====== Environment change lives only for the duration of the script
+setlocal
+
+rem ====== Set these shell variables before doing a build.
+rem VER is used to name the output bin dir as svn-win32-%VER%
+set VER=trunk
+rem DIR is appended to src- to make the dir name, e.g., src-trunk
+set DIR=trunk
+set DRIVE=C
+set PYTHONDIR=C:\Python22
+set AWKDIR=C:\SVN\awk
+set NASMDIR=C:\SVN\nasm
+set SDKINC=C:\Program Files\Microsoft SDK\include
+set SDKLIB=C:\Program Files\Microsoft SDK\lib
+set APACHEDIR=C:\Program Files\Apache Group\Apache2
+set GETTEXTINC=C:\SVN\gettext\include
+set GETTEXTLIB=C:\SVN\gettext\lib
+set GETTEXTBIN=C:\SVN\gettext\bin
+rem ====== End of shell variables which need to be set.
+
+rem Set up path to include Python and BDB.
+PATH=%PATH%;%DRIVE%:\SVN\src-%DIR%\db4-win32;%NASMDIR%;%PYTHONDIR%;%AWKDIR%;%GETTEXTBIN%
+
+rem Set INCLUDE and LIB for the msdev builds.
+set INCLUDE=%SDKINC%;%INCLUDE%;%GETTEXTINC%
+set LIB=%SDKLIB%;%LIB%;%GETTEXTLIB%
+
+rem Check that the subversion code exists here.
+cd %DRIVE%:\SVN\src-%DIR%
+if not exist subversion goto wrongstartdir
+cd ..
+
+rem ====== Check the prerequisites are at least in the right place.
+if not exist httpd-2.0.50 goto httpderr
+if not exist nasm goto nasmerr
+if not exist openssl-0.9.7d goto opensslerr
+if not exist src-%DIR% goto svnerr
+if not exist zlib goto zliberr
+if not exist zlib\zlibstat.lib goto zlibstaterr
+if not exist src-%DIR%\db4-win32 goto bdberr
+if not exist src-%DIR%\neon goto neonerr
+if not exist gettext goto gettexterr
+goto allok
+
+:wrongstartdir
+echo Unable to find %DRIVE%:\SVN\src-%DIR%\subversion
+goto theveryend
+:httpderr
+echo Unable to find httpd-2.0.50
+goto end
+:nasmerr
+echo Unable to find nasm
+goto end
+:opensslerr
+echo Unable to find openssl-0.9.7d
+goto end
+:svnerr
+echo Unable to find Subversion source in src-%DIR%
+goto end
+:zliberr
+echo Unable to find zlib
+goto end
+:zlibstaterr
+echo Please copy zlib\static32\zlibstat.lib to zlib\zlibstat.lib
+goto end
+:bdberr
+echo Unable to find Berkeley DB
+goto end
+:neonerr
+echo Unable to find neon
+goto end
+:gettexterr
+echo Unable to find gettext
+goto end
+:allok
+
+rem ====== Build openssl.
+cd openssl-0.9.7d
+perl Configure VC-WIN32
+call ms\do_nasm
+nmake -f ms\ntdll.mak
+cd out32dll
+call ..\ms\test
+cd ..\..
+
+rem ====== Build Apache 2
+cd src-%DIR%
+python gen-make.py -t dsp --with-httpd=..\httpd-2.0.50 --with-berkeley-db=db4-win32 --with-openssl=..\openssl-0.9.7d --with-zlib=..\zlib --enable-nls --enable-bdb-in-apr-util
+cd ..
+msdev httpd-2.0.50\apache.dsw /MAKE "BuildBin - Win32 Release"
+
+rem ====== Subversion
+cd src-%DIR%
+msdev subversion_msvc.dsw /USEENV /MAKE "__ALL_TESTS__ - Win32 Release"
+mkdir Release\subversion\tests\cmdline
+xcopy /S /Y subversion\tests\cmdline Release\subversion\tests\cmdline
+copy Release\subversion\mod_dav_svn\mod_dav_svn.so "%APACHEDIR%"\modules
+copy Release\subversion\mod_authz_svn\mod_authz_svn.so "%APACHEDIR%"\modules
+cd ..
+
+rem ====== Copy the binaries into a tree suitable for zipping.
+mkdir svn-win32-%VER%
+mkdir svn-win32-%VER%\bin
+mkdir svn-win32-%VER%\httpd
+mkdir svn-win32-%VER%\iconv
+copy src-%DIR%\db4-win32\bin\libdb42.dll svn-win32-%VER%\bin
+copy openssl-0.9.7d\out32dll\libeay32.dll svn-win32-%VER%\bin
+copy openssl-0.9.7d\out32dll\ssleay32.dll svn-win32-%VER%\bin
+copy httpd-2.0.50\srclib\apr\Release\libapr.dll svn-win32-%VER%\bin
+copy httpd-2.0.50\srclib\apr-iconv\Release\libapriconv.dll svn-win32-%VER%\bin
+copy httpd-2.0.50\srclib\apr-iconv\Release\iconv\*.so svn-win32-%VER%\iconv
+copy httpd-2.0.50\srclib\apr-util\Release\libaprutil.dll svn-win32-%VER%\bin
+copy gettext\bin\intl.dll svn-win32-%VER%\bin
+copy gettext\bin\iconv.dll svn-win32-%VER%\bin
+copy src-%DIR%\Release\subversion\svn\svn.exe svn-win32-%VER%\bin
+copy src-%DIR%\Release\subversion\svnadmin\svnadmin.exe svn-win32-%VER%\bin
+copy src-%DIR%\Release\subversion\svndumpfilter\svndumpfilter.exe svn-win32-%VER%\bin
+copy src-%DIR%\Release\subversion\svnlook\svnlook.exe svn-win32-%VER%\bin
+copy src-%DIR%\Release\subversion\svnserve\svnserve.exe svn-win32-%VER%\bin
+copy src-%DIR%\Release\subversion\svnversion\svnversion.exe svn-win32-%VER%\bin
+copy src-%DIR%\Release\subversion\svnrdump\svnrdump.exe svn-win32-%VER%\bin
+copy src-%DIR%\Release\subversion\mod_authz_svn\mod_authz_svn.so svn-win32-%VER%\httpd
+copy src-%DIR%\Release\subversion\mod_dav_svn\mod_dav_svn.so svn-win32-%VER%\httpd
+copy svn-win32-%VER%\bin\intl.dll "%APACHEDIR%\bin"
+copy svn-win32-%VER%\bin\iconv.dll "%APACHEDIR%\bin"
+copy svn-win32-%VER%\bin\libdb42.dll "%APACHEDIR%\bin"
+
+rem ====== Configure Apache ready for doing tests.
+@echo off
+echo Configure Apache to use the mod_dav_svn and mod_authz_svn modules
+echo by making sure these lines appear uncommented in httpd.conf:
+echo LoadModule dav_module modules/mod_dav.so
+echo LoadModule dav_fs_module modules/mod_dav_fs.so
+echo LoadModule dav_svn_module modules/mod_dav_svn.so
+echo LoadModule authz_svn_module modules/mod_authz_svn.so
+echo And further down the file add:
+echo ^<Location /svn-test-work/repositories^>
+echo DAV svn
+echo SVNParentPath %DRIVE%:/SVN/src-%DIR%/Release/subversion/tests/cmdline/svn-test-work/repositories
+echo ^</Location^>
+echo ^<Location /svn-test-work/local_tmp/repos^>
+echo DAV svn
+echo SVNPath %DRIVE%:/SVN/src-%DIR%/Release/subversion/tests/cmdline/svn-test-work/local_tmp/repos
+echo ^</Location^>
+echo Then restart Apache.
+
+echo Please configure Apache and press enter:
+pause
+@echo on
+
+rem ====== Run the tests.
+PATH=%DRIVE%:\SVN\svn-win32-%VER%\bin;%PATH%
+cd src-%DIR%
+python win-tests.py -c -r -v
+python win-tests.py -c -r -v -u http://localhost
+cd ..
+
+:end
+cd src-%DIR%
+endlocal
+:theveryend